Subject: svn commit: r1466761 [17/41] - in /hbase/branches/0.95: ./ hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-p...
Date: Thu, 11 Apr 2013 03:52:57 -0000
To: commits@hbase.apache.org
From: stack@apache.org
Message-Id: <20130411035304.AC4B12388CC5@eris.apache.org>

Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java?rev=1466761&r1=1466760&r2=1466761&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java Thu Apr 11 03:52:56 2013
@@ -10,129 +10,50 @@ public final class MasterMonitorProtos { } public interface GetSchemaAlterStatusRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; - /** - * required bytes tableName = 1; - */ boolean hasTableName(); - /** - * required bytes tableName = 1; - */ com.google.protobuf.ByteString getTableName(); } - /** - * Protobuf type {@code GetSchemaAlterStatusRequest} - */ public static final class GetSchemaAlterStatusRequest extends com.google.protobuf.GeneratedMessage implements GetSchemaAlterStatusRequestOrBuilder { // Use GetSchemaAlterStatusRequest.newBuilder() to construct. 
- private GetSchemaAlterStatusRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetSchemaAlterStatusRequest(Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetSchemaAlterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - + private GetSchemaAlterStatusRequest(boolean noInit) {} + private static final GetSchemaAlterStatusRequest defaultInstance; public static GetSchemaAlterStatusRequest getDefaultInstance() { return defaultInstance; } - + public GetSchemaAlterStatusRequest getDefaultInstanceForType() { return defaultInstance; } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private GetSchemaAlterStatusRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetSchemaAlterStatusRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetSchemaAlterStatusRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; - /** - * required bytes tableName = 1; - */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 
0x00000001); } - /** - * required bytes tableName = 1; - */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } @@ -140,7 +61,7 @@ public final class MasterMonitorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -148,7 +69,7 @@ public final class MasterMonitorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -157,12 +78,12 @@ public final class MasterMonitorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -172,14 +93,14 @@ public final class MasterMonitorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -189,7 +110,7 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -200,13 +121,9 @@ public final class MasterMonitorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - - private int memoizedHashCode = 0; + @java.lang.Override public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -214,79 +131,89 @@ public final class MasterMonitorProtos { hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } - /** - * Protobuf type {@code GetSchemaAlterStatusRequest} - */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequestOrBuilder { @@ -294,21 +221,18 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class); + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + + private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -319,27 +243,27 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDescriptor(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial(); if (!result.isInitialized()) { @@ -347,7 +271,17 @@ public final class MasterMonitorProtos { } return result; } - + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest(this); int from_bitField0_ = bitField0_; @@ -360,7 +294,7 @@ public final class MasterMonitorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)other); @@ -369,7 +303,7 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -378,7 +312,7 @@ public final class MasterMonitorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -386,43 +320,49 @@ public final class MasterMonitorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } } } - return this; } + private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes tableName = 1; - */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - /** - * required bytes tableName = 1; - */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - /** - * required bytes tableName = 1; - */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -432,183 +372,84 @@ public final class MasterMonitorProtos { onChanged(); return this; } - /** - * required bytes tableName = 1; - */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest) } - + static { defaultInstance = new GetSchemaAlterStatusRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest) } - + public interface GetSchemaAlterStatusResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint32 yetToUpdateRegions = 1; - /** - * optional uint32 yetToUpdateRegions = 1; - */ boolean hasYetToUpdateRegions(); - /** - * optional uint32 yetToUpdateRegions = 1; - */ int getYetToUpdateRegions(); - + // optional uint32 totalRegions = 2; - /** - * optional uint32 totalRegions = 2; - */ boolean hasTotalRegions(); - /** - * optional uint32 totalRegions = 2; - */ int getTotalRegions(); } - /** - * Protobuf type {@code GetSchemaAlterStatusResponse} - */ public static final class GetSchemaAlterStatusResponse extends com.google.protobuf.GeneratedMessage implements GetSchemaAlterStatusResponseOrBuilder { // Use GetSchemaAlterStatusResponse.newBuilder() to 
construct. - private GetSchemaAlterStatusResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetSchemaAlterStatusResponse(Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetSchemaAlterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - + private GetSchemaAlterStatusResponse(boolean noInit) {} + private static final GetSchemaAlterStatusResponse defaultInstance; public static GetSchemaAlterStatusResponse getDefaultInstance() { return defaultInstance; } - + public GetSchemaAlterStatusResponse getDefaultInstanceForType() { return defaultInstance; } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private GetSchemaAlterStatusResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - yetToUpdateRegions_ = input.readUInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - totalRegions_ = input.readUInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetSchemaAlterStatusResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetSchemaAlterStatusResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; } - + private int bitField0_; // optional uint32 yetToUpdateRegions = 1; public static final int YETTOUPDATEREGIONS_FIELD_NUMBER = 1; private int 
yetToUpdateRegions_; - /** - * optional uint32 yetToUpdateRegions = 1; - */ public boolean hasYetToUpdateRegions() { return ((bitField0_ & 0x00000001) == 0x00000001); } - /** - * optional uint32 yetToUpdateRegions = 1; - */ public int getYetToUpdateRegions() { return yetToUpdateRegions_; } - + // optional uint32 totalRegions = 2; public static final int TOTALREGIONS_FIELD_NUMBER = 2; private int totalRegions_; - /** - * optional uint32 totalRegions = 2; - */ public boolean hasTotalRegions() { return ((bitField0_ & 0x00000002) == 0x00000002); } - /** - * optional uint32 totalRegions = 2; - */ public int getTotalRegions() { return totalRegions_; } - + private void initFields() { yetToUpdateRegions_ = 0; totalRegions_ = 0; @@ -617,11 +458,11 @@ public final class MasterMonitorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -633,12 +474,12 @@ public final class MasterMonitorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -652,14 +493,14 @@ public final class MasterMonitorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -669,7 +510,7 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) obj; - + boolean result = true; result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions()); if (hasYetToUpdateRegions()) { @@ -685,13 +526,9 @@ public final class MasterMonitorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - - private int memoizedHashCode = 0; + @java.lang.Override public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasYetToUpdateRegions()) { @@ -703,79 +540,89 @@ public final class MasterMonitorProtos { hash = (53 * hash) + getTotalRegions(); } hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } 
public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } - /** - * Protobuf type {@code GetSchemaAlterStatusResponse} - */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponseOrBuilder { @@ -783,21 +630,18 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class); + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + + private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -808,7 +652,7 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); yetToUpdateRegions_ = 0; @@ -817,20 +661,20 @@ public final class MasterMonitorProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDescriptor(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial(); if (!result.isInitialized()) { @@ -838,7 +682,17 @@ public final class MasterMonitorProtos { } return result; } - + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse(this); int from_bitField0_ = bitField0_; @@ -855,7 +709,7 @@ public final class MasterMonitorProtos { onBuilt(); return result; } - + public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse)other); @@ -864,7 +718,7 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this; if (other.hasYetToUpdateRegions()) { @@ -876,261 +730,153 @@ public final class MasterMonitorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + yetToUpdateRegions_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + totalRegions_ = input.readUInt32(); + break; + } } } - return this; } + private int bitField0_; - + // optional uint32 yetToUpdateRegions = 1; private int yetToUpdateRegions_ ; - /** - * optional uint32 yetToUpdateRegions = 1; - */ public boolean hasYetToUpdateRegions() { return ((bitField0_ & 0x00000001) == 0x00000001); } - /** - * optional uint32 yetToUpdateRegions = 1; - */ public int getYetToUpdateRegions() { return yetToUpdateRegions_; } - /** - * optional uint32 yetToUpdateRegions = 1; - */ public Builder setYetToUpdateRegions(int value) { bitField0_ |= 0x00000001; yetToUpdateRegions_ = value; onChanged(); return this; } - /** - * optional uint32 yetToUpdateRegions = 1; - */ public Builder clearYetToUpdateRegions() { bitField0_ = (bitField0_ & ~0x00000001); yetToUpdateRegions_ = 0; onChanged(); return this; } - + // optional uint32 totalRegions = 2; private int totalRegions_ ; - /** - * optional uint32 totalRegions = 2; - */ public boolean hasTotalRegions() { return ((bitField0_ & 0x00000002) == 0x00000002); } - /** - * optional uint32 totalRegions = 2; - */ public int getTotalRegions() { return totalRegions_; } - /** - * optional uint32 totalRegions = 2; - */ public Builder setTotalRegions(int value) { bitField0_ |= 0x00000002; totalRegions_ = value; onChanged(); return this; } - /** - * optional uint32 totalRegions = 2; - */ public Builder clearTotalRegions() { bitField0_ = (bitField0_ & ~0x00000002); 
totalRegions_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse) } - + static { defaultInstance = new GetSchemaAlterStatusResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse) } - + public interface GetTableDescriptorsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated string tableNames = 1; - /** - * repeated string tableNames = 1; - */ - java.util.List - getTableNamesList(); - /** - * repeated string tableNames = 1; - */ + java.util.List getTableNamesList(); int getTableNamesCount(); - /** - * repeated string tableNames = 1; - */ - java.lang.String getTableNames(int index); - /** - * repeated string tableNames = 1; - */ - com.google.protobuf.ByteString - getTableNamesBytes(int index); + String getTableNames(int index); } - /** - * Protobuf type {@code GetTableDescriptorsRequest} - */ public static final class GetTableDescriptorsRequest extends com.google.protobuf.GeneratedMessage implements GetTableDescriptorsRequestOrBuilder { // Use GetTableDescriptorsRequest.newBuilder() to construct. - private GetTableDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableDescriptorsRequest(Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetTableDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - + private GetTableDescriptorsRequest(boolean noInit) {} + private static final GetTableDescriptorsRequest defaultInstance; public static GetTableDescriptorsRequest getDefaultInstance() { return defaultInstance; } - + public GetTableDescriptorsRequest getDefaultInstanceForType() { return defaultInstance; } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private GetTableDescriptorsRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - tableNames_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - tableNames_.add(input.readBytes()); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - tableNames_ = new com.google.protobuf.UnmodifiableLazyStringList(tableNames_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableDescriptorsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableDescriptorsRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; } - + // repeated string tableNames = 1; public static final int TABLENAMES_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList tableNames_; - /** - * repeated string tableNames = 1; - */ - public java.util.List + public java.util.List getTableNamesList() { return tableNames_; } - /** - * repeated string tableNames = 1; - */ public int getTableNamesCount() { return tableNames_.size(); } - /** - * repeated string tableNames = 1; - */ - public java.lang.String getTableNames(int index) { + public String getTableNames(int index) { return tableNames_.get(index); } - /** - * repeated string tableNames = 1; - */ - public com.google.protobuf.ByteString - getTableNamesBytes(int index) { - return tableNames_.getByteString(index); - } - + private void initFields() { tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; } @@ -1138,11 +884,11 @@ public final class MasterMonitorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1151,12 +897,12 @@ public final class MasterMonitorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -1171,14 +917,14 @@ public final class MasterMonitorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1188,7 +934,7 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) obj; - + boolean result = true; result = result && getTableNamesList() .equals(other.getTableNamesList()); @@ -1196,13 +942,9 @@ public final class 
MasterMonitorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - - private int memoizedHashCode = 0; + @java.lang.Override public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableNamesCount() > 0) { @@ -1210,79 +952,89 @@ public final class MasterMonitorProtos { hash = (53 * hash) + getTableNamesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - 
return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } - /** - * Protobuf type {@code GetTableDescriptorsRequest} - */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequestOrBuilder { @@ -1290,21 +1042,18 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class); + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + + private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1315,27 +1064,27 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDescriptor(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest build() { 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial(); if (!result.isInitialized()) { @@ -1343,7 +1092,17 @@ public final class MasterMonitorProtos { } return result; } - + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest(this); int from_bitField0_ = bitField0_; @@ -1356,7 +1115,7 @@ public final class MasterMonitorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)other); @@ -1365,7 +1124,7 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this; if (!other.tableNames_.isEmpty()) { @@ -1381,30 +1140,45 @@ public final class MasterMonitorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureTableNamesIsMutable(); + tableNames_.add(input.readBytes()); + break; + } } } - return this; } + private int bitField0_; - + // repeated string tableNames = 1; private com.google.protobuf.LazyStringList tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureTableNamesIsMutable() { @@ -1413,37 +1187,18 @@ public final class MasterMonitorProtos { bitField0_ |= 0x00000001; } } - /** - * repeated string 
tableNames = 1; - */ - public java.util.List + public java.util.List getTableNamesList() { return java.util.Collections.unmodifiableList(tableNames_); } - /** - * repeated string tableNames = 1; - */ public int getTableNamesCount() { return tableNames_.size(); } - /** - * repeated string tableNames = 1; - */ - public java.lang.String getTableNames(int index) { + public String getTableNames(int index) { return tableNames_.get(index); } - /** - * repeated string tableNames = 1; - */ - public com.google.protobuf.ByteString - getTableNamesBytes(int index) { - return tableNames_.getByteString(index); - } - /** - * repeated string tableNames = 1; - */ public Builder setTableNames( - int index, java.lang.String value) { + int index, String value) { if (value == null) { throw new NullPointerException(); } @@ -1452,11 +1207,7 @@ public final class MasterMonitorProtos { onChanged(); return this; } - /** - * repeated string tableNames = 1; - */ - public Builder addTableNames( - java.lang.String value) { + public Builder addTableNames(String value) { if (value == null) { throw new NullPointerException(); } @@ -1465,215 +1216,98 @@ public final class MasterMonitorProtos { onChanged(); return this; } - /** - * repeated string tableNames = 1; - */ public Builder addAllTableNames( - java.lang.Iterable values) { + java.lang.Iterable values) { ensureTableNamesIsMutable(); super.addAll(values, tableNames_); onChanged(); return this; } - /** - * repeated string tableNames = 1; - */ public Builder clearTableNames() { tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - /** - * repeated string tableNames = 1; - */ - public Builder addTableNamesBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureTableNamesIsMutable(); + void addTableNames(com.google.protobuf.ByteString value) { + ensureTableNamesIsMutable(); tableNames_.add(value); onChanged(); - return this; } - + // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest) } - + static { defaultInstance = new GetTableDescriptorsRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest) } - + public interface GetTableDescriptorsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .TableSchema tableSchema = 1; - /** - * repeated .TableSchema tableSchema = 1; - */ java.util.List getTableSchemaList(); - /** - * repeated .TableSchema tableSchema = 1; - */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index); - /** - * repeated .TableSchema tableSchema = 1; - */ int getTableSchemaCount(); - /** - * repeated .TableSchema tableSchema = 1; - */ java.util.List getTableSchemaOrBuilderList(); - /** - * repeated .TableSchema tableSchema = 1; - */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index); } - /** - * Protobuf type {@code GetTableDescriptorsResponse} - */ public static final class GetTableDescriptorsResponse extends com.google.protobuf.GeneratedMessage implements GetTableDescriptorsResponseOrBuilder { // Use GetTableDescriptorsResponse.newBuilder() to construct. 
- private GetTableDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableDescriptorsResponse(Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetTableDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - + private GetTableDescriptorsResponse(boolean noInit) {} + private static final GetTableDescriptorsResponse defaultInstance; public static GetTableDescriptorsResponse getDefaultInstance() { return defaultInstance; } - + public GetTableDescriptorsResponse getDefaultInstanceForType() { return defaultInstance; } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private GetTableDescriptorsResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - tableSchema_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableDescriptorsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableDescriptorsResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; } - + // repeated .TableSchema tableSchema = 1; public static final int TABLESCHEMA_FIELD_NUMBER = 1; private java.util.List tableSchema_; - /** - * repeated .TableSchema tableSchema = 1; - */ public java.util.List getTableSchemaList() { return tableSchema_; } - /** - * repeated .TableSchema tableSchema = 1; - */ public java.util.List getTableSchemaOrBuilderList() { return tableSchema_; } - /** - * repeated .TableSchema tableSchema = 1; - */ public int getTableSchemaCount() { return tableSchema_.size(); } - /** - * repeated .TableSchema tableSchema = 1; - */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { return tableSchema_.get(index); } - /** - * repeated .TableSchema tableSchema = 1; - */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { return tableSchema_.get(index); } - + private void initFields() { tableSchema_ = java.util.Collections.emptyList(); } @@ -1681,7 +1315,7 @@ public final class MasterMonitorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { memoizedIsInitialized = 0; @@ -1691,7 +1325,7 @@ public final class MasterMonitorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1700,12 +1334,12 @@ public final class MasterMonitorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < tableSchema_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -1715,14 +1349,14 @@ public final class MasterMonitorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1732,7 +1366,7 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) obj; - + boolean result = true; result = result && getTableSchemaList() .equals(other.getTableSchemaList()); @@ -1740,13 +1374,9 @@ public final class MasterMonitorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - - private int memoizedHashCode = 0; + @java.lang.Override public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableSchemaCount() > 0) { @@ -1754,79 +1384,89 @@ public final class MasterMonitorProtos { hash = (53 * hash) + getTableSchemaList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.ByteString 
data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } - /** - * Protobuf type {@code GetTableDescriptorsResponse} - */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponseOrBuilder { @@ -1834,21 +1474,18 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class); + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + + private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1860,7 +1497,7 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { @@ -1871,20 +1508,20 @@ public final class MasterMonitorProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDescriptor(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1892,7 +1529,17 @@ public final class MasterMonitorProtos { } return result; } - + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + 
} + return result; + } + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse(this); int from_bitField0_ = bitField0_; @@ -1908,7 +1555,7 @@ public final class MasterMonitorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse)other); @@ -1917,7 +1564,7 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this; if (tableSchemaBuilder_ == null) { @@ -1949,7 +1596,7 @@ public final class MasterMonitorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { @@ -1959,26 +1606,42 @@ public final class MasterMonitorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; [... 1640 lines stripped ...]
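
For reference, this hunk regenerates the message classes without the protobuf 2.5 plumbing: the static PARSER field, the stored unknownFields member and ensureFieldAccessorsInitialized() go away, the static parseFrom() overloads delegate to newBuilder().mergeFrom(...).buildParsed(), and Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) reads tags in an explicit switch. The message API that callers see is unchanged. A minimal sketch of that API follows; it is illustrative only — the class name GetTableDescriptorsExample and the table names are invented, and GetTableDescriptorsRequest.parseFrom(byte[]) is assumed to be generated in the standard way even though that overload falls in the stripped portion of the diff.

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest;
    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse;

    public class GetTableDescriptorsExample {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a request naming the tables whose descriptors are wanted
        // (repeated string tableNames = 1); the table names here are invented.
        GetTableDescriptorsRequest request = GetTableDescriptorsRequest.newBuilder()
            .addTableNames("exampleTableA")
            .addTableNames("exampleTableB")
            .build();

        // Round-trip through bytes the way the RPC layer would; after this change
        // parseFrom() is implemented as newBuilder().mergeFrom(data).buildParsed().
        byte[] wire = request.toByteArray();
        GetTableDescriptorsRequest parsed = GetTableDescriptorsRequest.parseFrom(wire);
        System.out.println("tables requested: " + parsed.getTableNamesList());

        // The response carries repeated .TableSchema tableSchema = 1; these
        // accessors appear unchanged on both sides of the diff.
        GetTableDescriptorsResponse response = GetTableDescriptorsResponse.getDefaultInstance();
        for (int i = 0; i < response.getTableSchemaCount(); i++) {
          TableSchema schema = response.getTableSchema(i);
          System.out.println(schema);
        }
      }
    }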