From: appy@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Date: Mon, 17 Jul 2017 17:58:37 -0000
Message-Id: <759cf725dc574a6f96ad40af956ffbf5@git.apache.org>
In-Reply-To: <7015d67bb4404c3487d0d943c7817331@git.apache.org>
References: <7015d67bb4404c3487d0d943c7817331@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: [44/94] [abbrv] [partial] hbase git commit: Revert "HBASE-17056 Remove checked in PB generated files Selective add of dependency on" Revert for now. Build unstable and some interesting issues around CLASSPATH

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
new file mode 100644
index 0000000..508790c
--- /dev/null
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
@@ -0,0 +1,2375 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Aggregate.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AggregateProtos {
+  private AggregateProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface AggregateRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string interpreter_class_name = 1;
+    /**
+     * <code>required string interpreter_class_name = 1;</code>
+     *
+     * <pre>
+     ** The request passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+     */
+    boolean hasInterpreterClassName();
+    /**
+     * <code>required string interpreter_class_name = 1;</code>
+     *
+     * <pre>
+     ** The request passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+     */
+    java.lang.String getInterpreterClassName();
+    /**
+     * <code>required string interpreter_class_name = 1;</code>
+     *
+     * <pre>
+     ** The request passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ + com.google.protobuf.ByteString + getInterpreterClassNameBytes(); + + // required .hbase.pb.Scan scan = 2; + /** + * required .hbase.pb.Scan scan = 2; + */ + boolean hasScan(); + /** + * required .hbase.pb.Scan scan = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + /** + * required .hbase.pb.Scan scan = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); + + // optional bytes interpreter_specific_bytes = 3; + /** + * optional bytes interpreter_specific_bytes = 3; + */ + boolean hasInterpreterSpecificBytes(); + /** + * optional bytes interpreter_specific_bytes = 3; + */ + com.google.protobuf.ByteString getInterpreterSpecificBytes(); + } + /** + * Protobuf type {@code hbase.pb.AggregateRequest} + */ + public static final class AggregateRequest extends + com.google.protobuf.GeneratedMessage + implements AggregateRequestOrBuilder { + // Use AggregateRequest.newBuilder() to construct. + private AggregateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private AggregateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final AggregateRequest defaultInstance; + public static AggregateRequest getDefaultInstance() { + return defaultInstance; + } + + public AggregateRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AggregateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + interpreterClassName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = scan_.toBuilder(); + } + scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scan_); + scan_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + bitField0_ |= 0x00000004; + interpreterSpecificBytes_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AggregateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AggregateRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string interpreter_class_name = 1; + public static final int INTERPRETER_CLASS_NAME_FIELD_NUMBER = 1; + private java.lang.Object interpreterClassName_; + /** + * required string interpreter_class_name = 1; + * + *
+     ** The request passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+     */
+    public boolean hasInterpreterClassName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string interpreter_class_name = 1;</code>
+     *
+     * <pre>
+     ** The request passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+     */
+    public java.lang.String getInterpreterClassName() {
+      java.lang.Object ref = interpreterClassName_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          interpreterClassName_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string interpreter_class_name = 1;</code>
+     *
+     * <pre>
+     ** The request passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getInterpreterClassNameBytes() { + java.lang.Object ref = interpreterClassName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + interpreterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required .hbase.pb.Scan scan = 2; + public static final int SCAN_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + /** + * required .hbase.pb.Scan scan = 2; + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // optional bytes interpreter_specific_bytes = 3; + public static final int INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString interpreterSpecificBytes_; + /** + * optional bytes interpreter_specific_bytes = 3; + */ + public boolean hasInterpreterSpecificBytes() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes interpreter_specific_bytes = 3; + */ + public com.google.protobuf.ByteString getInterpreterSpecificBytes() { + return interpreterSpecificBytes_; + } + + private void initFields() { + interpreterClassName_ = ""; + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasInterpreterClassName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasScan()) { + memoizedIsInitialized = 0; + return false; + } + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getInterpreterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, interpreterSpecificBytes_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getInterpreterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, interpreterSpecificBytes_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj; + + boolean result = true; + result = result && (hasInterpreterClassName() == other.hasInterpreterClassName()); + if (hasInterpreterClassName()) { + result = result && getInterpreterClassName() + .equals(other.getInterpreterClassName()); + } + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes()); + if (hasInterpreterSpecificBytes()) { + result = result && getInterpreterSpecificBytes() + .equals(other.getInterpreterSpecificBytes()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasInterpreterClassName()) { + hash = (37 * hash) + INTERPRETER_CLASS_NAME_FIELD_NUMBER; + hash = (53 * hash) + getInterpreterClassName().hashCode(); + } + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasInterpreterSpecificBytes()) { + hash = (37 * hash) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER; + hash = (53 * hash) + getInterpreterSpecificBytes().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.AggregateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + interpreterClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.interpreterClassName_ = interpreterClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.interpreterSpecificBytes_ = interpreterSpecificBytes_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) return this; + if (other.hasInterpreterClassName()) { + bitField0_ |= 0x00000001; + interpreterClassName_ = other.interpreterClassName_; + onChanged(); + } + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasInterpreterSpecificBytes()) { + setInterpreterSpecificBytes(other.getInterpreterSpecificBytes()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasInterpreterClassName()) { + + return false; + } + if (!hasScan()) { + + return false; + } + if (!getScan().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string interpreter_class_name = 
1;
+      private java.lang.Object interpreterClassName_ = "";
+      /**
+       * <code>required string interpreter_class_name = 1;</code>
+       *
+       * <pre>
+       ** The request passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * </pre>
+       */
+      public boolean hasInterpreterClassName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string interpreter_class_name = 1;</code>
+       *
+       * <pre>
+       ** The request passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * </pre>
+       */
+      public java.lang.String getInterpreterClassName() {
+        java.lang.Object ref = interpreterClassName_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          interpreterClassName_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string interpreter_class_name = 1;</code>
+       *
+       * <pre>
+       ** The request passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getInterpreterClassNameBytes() {
+        java.lang.Object ref = interpreterClassName_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          interpreterClassName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string interpreter_class_name = 1;</code>
+       *
+       * <pre>
+       ** The request passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * </pre>
+       */
+      public Builder setInterpreterClassName(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        interpreterClassName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string interpreter_class_name = 1;</code>
+       *
+       * <pre>
+       ** The request passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * </pre>
+       */
+      public Builder clearInterpreterClassName() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string interpreter_class_name = 1;</code>
+       *
+       * <pre>
+       ** The request passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * </pre>
+ */ + public Builder setInterpreterClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + interpreterClassName_ = value; + onChanged(); + return this; + } + + // required .hbase.pb.Scan scan = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + /** + * required .hbase.pb.Scan scan = 2; + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + /** + * required .hbase.pb.Scan scan = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + /** + * required .hbase.pb.Scan scan = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if 
(scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // optional bytes interpreter_specific_bytes = 3; + private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes interpreter_specific_bytes = 3; + */ + public boolean hasInterpreterSpecificBytes() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes interpreter_specific_bytes = 3; + */ + public com.google.protobuf.ByteString getInterpreterSpecificBytes() { + return interpreterSpecificBytes_; + } + /** + * optional bytes interpreter_specific_bytes = 3; + */ + public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + interpreterSpecificBytes_ = value; + onChanged(); + return this; + } + /** + * optional bytes interpreter_specific_bytes = 3; + */ + public Builder clearInterpreterSpecificBytes() { + bitField0_ = (bitField0_ & ~0x00000004); + interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateRequest) + } + + static { + defaultInstance = new AggregateRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.AggregateRequest) + } + + public interface AggregateResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes first_part = 1; + /** + * repeated bytes first_part = 1; + * + *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both first_part and second_part
+     * have defined values (and the second_part is not present in the response
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * </pre>
+     */
+    java.util.List<com.google.protobuf.ByteString> getFirstPartList();
+    /**
+     * <code>repeated bytes first_part = 1;</code>
+     *
+     * <pre>
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both first_part and second_part
+     * have defined values (and the second_part is not present in the response
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * </pre>
+     */
+    int getFirstPartCount();
+    /**
+     * <code>repeated bytes first_part = 1;</code>
+     *
+     * <pre>
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both first_part and second_part
+     * have defined values (and the second_part is not present in the response
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * </pre>
+ */ + com.google.protobuf.ByteString getFirstPart(int index); + + // optional bytes second_part = 2; + /** + * optional bytes second_part = 2; + */ + boolean hasSecondPart(); + /** + * optional bytes second_part = 2; + */ + com.google.protobuf.ByteString getSecondPart(); + } + /** + * Protobuf type {@code hbase.pb.AggregateResponse} + */ + public static final class AggregateResponse extends + com.google.protobuf.GeneratedMessage + implements AggregateResponseOrBuilder { + // Use AggregateResponse.newBuilder() to construct. + private AggregateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private AggregateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final AggregateResponse defaultInstance; + public static AggregateResponse getDefaultInstance() { + return defaultInstance; + } + + public AggregateResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AggregateResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + firstPart_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + firstPart_.add(input.readBytes()); + break; + } + case 18: { + bitField0_ |= 0x00000001; + secondPart_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + firstPart_ = java.util.Collections.unmodifiableList(firstPart_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AggregateResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AggregateResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AggregateResponse> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // repeated bytes first_part = 1;
+    public static final int FIRST_PART_FIELD_NUMBER = 1;
+    private java.util.List<com.google.protobuf.ByteString> firstPart_;
+    /**
+     * <code>repeated bytes first_part = 1;</code>
+     *
+     * <pre>
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both first_part and second_part
+     * have defined values (and the second_part is not present in the response
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * </pre>
+     */
+    public java.util.List<com.google.protobuf.ByteString>
+        getFirstPartList() {
+      return firstPart_;
+    }
+    /**
+     * <code>repeated bytes first_part = 1;</code>
+     *
+     * <pre>
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both first_part and second_part
+     * have defined values (and the second_part is not present in the response
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * </pre>
+     */
+    public int getFirstPartCount() {
+      return firstPart_.size();
+    }
+    /**
+     * <code>repeated bytes first_part = 1;</code>
+     *
+     * <pre>
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both first_part and second_part
+     * have defined values (and the second_part is not present in the response
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * </pre>
+ */ + public com.google.protobuf.ByteString getFirstPart(int index) { + return firstPart_.get(index); + } + + // optional bytes second_part = 2; + public static final int SECOND_PART_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString secondPart_; + /** + * optional bytes second_part = 2; + */ + public boolean hasSecondPart() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bytes second_part = 2; + */ + public com.google.protobuf.ByteString getSecondPart() { + return secondPart_; + } + + private void initFields() { + firstPart_ = java.util.Collections.emptyList(); + secondPart_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < firstPart_.size(); i++) { + output.writeBytes(1, firstPart_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(2, secondPart_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < firstPart_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(firstPart_.get(i)); + } + size += dataSize; + size += 1 * getFirstPartList().size(); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, secondPart_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj; + + boolean result = true; + result = result && getFirstPartList() + .equals(other.getFirstPartList()); + result = result && (hasSecondPart() == other.hasSecondPart()); + if (hasSecondPart()) { + result = result && getSecondPart() + .equals(other.getSecondPart()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFirstPartCount() > 0) { + hash = (37 * hash) + FIRST_PART_FIELD_NUMBER; + hash = (53 * hash) + getFirstPartList().hashCode(); + } + if (hasSecondPart()) { + hash = (37 * hash) + SECOND_PART_FIELD_NUMBER; + hash = (53 * hash) + getSecondPart().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.AggregateResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + firstPart_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + secondPart_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + firstPart_ = java.util.Collections.unmodifiableList(firstPart_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.firstPart_ = firstPart_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.secondPart_ = secondPart_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this; + if (!other.firstPart_.isEmpty()) { + if (firstPart_.isEmpty()) { + 
firstPart_ = other.firstPart_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFirstPartIsMutable(); + firstPart_.addAll(other.firstPart_); + } + onChanged(); + } + if (other.hasSecondPart()) { + setSecondPart(other.getSecondPart()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated bytes first_part = 1; + private java.util.List firstPart_ = java.util.Collections.emptyList(); + private void ensureFirstPartIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + firstPart_ = new java.util.ArrayList(firstPart_); + bitField0_ |= 0x00000001; + } + } + /** + * repeated bytes first_part = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+       */
+      public java.util.List<com.google.protobuf.ByteString>
+          getFirstPartList() {
+        return java.util.Collections.unmodifiableList(firstPart_);
+      }
+      /**
+       * <code>repeated bytes first_part = 1;</code>
+       *
+       * <pre>
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+       */
+      public int getFirstPartCount() {
+        return firstPart_.size();
+      }
+      /**
+       * <code>repeated bytes first_part = 1;</code>
+       *
+       * <pre>
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+       */
+      public com.google.protobuf.ByteString getFirstPart(int index) {
+        return firstPart_.get(index);
+      }
+      /**
+       * <code>repeated bytes first_part = 1;</code>
+       *
+       * <pre>
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+       */
+      public Builder setFirstPart(
+          int index, com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureFirstPartIsMutable();
+        firstPart_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated bytes first_part = 1;</code>
+       *
+       * <pre>
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+       */
+      public Builder addFirstPart(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureFirstPartIsMutable();
+        firstPart_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated bytes first_part = 1;</code>
+       *
+       * <pre>
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+       */
+      public Builder addAllFirstPart(
+          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+        ensureFirstPartIsMutable();
+        super.addAll(values, firstPart_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated bytes first_part = 1;</code>
+       *
+       * <pre>
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both first_part and second_part
+       * have defined values (and the second_part is not present in the response
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * </pre>
+ */ + public Builder clearFirstPart() { + firstPart_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // optional bytes second_part = 2; + private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes second_part = 2; + */ + public boolean hasSecondPart() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes second_part = 2; + */ + public com.google.protobuf.ByteString getSecondPart() { + return secondPart_; + } + /** + * optional bytes second_part = 2; + */ + public Builder setSecondPart(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + secondPart_ = value; + onChanged(); + return this; + } + /** + * optional bytes second_part = 2; + */ + public Builder clearSecondPart() { + bitField0_ = (bitField0_ & ~0x00000002); + secondPart_ = getDefaultInstance().getSecondPart(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateResponse) + } + + static { + defaultInstance = new AggregateResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.AggregateResponse) + } + + /** + * Protobuf service {@code hbase.pb.AggregateService} + * + *
+  /**
+   * Protobuf service {@code hbase.pb.AggregateService}
+   *
+   * <pre>
+   ** Refer to the AggregateImplementation class for an overview of the
+   *  AggregateService method implementations and their functionality.
+   * </pre>
+   */
+  public static abstract class AggregateService
+      implements com.google.protobuf.Service {
+    protected AggregateService() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getMax(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+      /**
+       * <code>rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getMin(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+      /**
+       * <code>rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getSum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+      /**
+       * <code>rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getRowNum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+      /**
+       * <code>rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getAvg(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+      /**
+       * <code>rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getStd(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+      /**
+       * <code>rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+       */
+      public abstract void getMedian(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new AggregateService() {
+        @java.lang.Override
+        public void getMax(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getMax(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void getMin(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getMin(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void getSum(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getSum(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void getRowNum(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getRowNum(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void getAvg(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getAvg(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void getStd(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getStd(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void getMedian(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getMedian(controller, request, done);
+        }
+
+      };
+    }
+
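[Editorial illustration, not part of the patch.] Interface plus newReflectiveService, just above, is the seam between user code and protobuf plumbing: you implement the seven aggregate methods and the generated wrapper handles descriptor-based dispatch. A minimal hedged sketch of exercising that path; the no-op implementation and the method-index-3 dispatch for GetRowNum are illustrative only (HBase's own AggregateImplementation extends AggregateService directly rather than going through newReflectiveService).

    import com.google.protobuf.Descriptors;
    import com.google.protobuf.Message;
    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcController;
    import com.google.protobuf.Service;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

    public class ReflectiveServiceSketch implements AggregateService.Interface {
      // Every RPC answers with an empty AggregateResponse; a real endpoint
      // would scan its region and fill first_part/second_part here.
      private void answer(RpcCallback<AggregateResponse> done) {
        done.run(AggregateResponse.getDefaultInstance());
      }
      public void getMax(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }
      public void getMin(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }
      public void getSum(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }
      public void getRowNum(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }
      public void getAvg(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }
      public void getStd(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }
      public void getMedian(RpcController c, AggregateRequest r, RpcCallback<AggregateResponse> done) { answer(done); }

      public static void main(String[] args) {
        // Wrap the plain Interface in the generated protobuf Service and
        // dispatch GetRowNum (method index 3) through the descriptor -- the
        // same path callMethod takes for a real RPC.
        Service service = AggregateService.newReflectiveService(new ReflectiveServiceSketch());
        Descriptors.MethodDescriptor getRowNum =
            service.getDescriptorForType().getMethods().get(3);
        Message request = service.getRequestPrototype(getRowNum); // default instance
        service.callMethod(getRowNum, null, request,
            new RpcCallback<Message>() {
              public void run(Message response) {
                System.out.println("GetRowNum answered: " + response);
              }
            });
      }
    }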
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            case 1:
+              return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            case 2:
+              return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            case 3:
+              return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            case 4:
+              return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            case 5:
+              return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            case 6:
+              return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+            case 1:
+            case 2:
+            case 3:
+            case 4:
+            case 5:
+            case 6:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+            case 1:
+            case 2:
+            case 3:
+            case 4:
+            case 5:
+            case 6:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getMax(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    /**
+     * <code>rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getMin(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    /**
+     * <code>rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getSum(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    /**
+     * <code>rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getRowNum(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    /**
+     * <code>rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getAvg(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    /**
+     * <code>rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getStd(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    /**
+     * <code>rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
+     */
+    public abstract void getMedian(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 2:
+          this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 3:
+          this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 4:
+          this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 5:
+          this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 6:
+          this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+        case 1:
+        case 2:
+        case 3:
+        case 4:
+        case 5:
+        case 6:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+        case 1:
+        case 2:
+        case 3:
+        case 4:
+        case 5:
+        case 6:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public void getMax(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+
+      public void getMin(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+
+      public void getSum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.prot
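[Editorial illustration, not part of the patch.] The generated Stub above is rarely driven by hand. In the hbase-endpoint module, AggregationClient wraps these same RPCs (getRowNum, getAvg, and friends) per region and merges the partial results. A minimal client-side sketch; the table name "mytable" and the family/qualifier "cf"/"q" are hypothetical, and the table is assumed to have the AggregateImplementation coprocessor loaded.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
    import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AggregationClientSketch {
      public static void main(String[] args) throws Throwable {
        Configuration conf = HBaseConfiguration.create();
        AggregationClient aggregationClient = new AggregationClient(conf);

        // Restrict the scan to one column; hypothetical family/qualifier.
        Scan scan = new Scan();
        scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));

        // rowCount ends up as an AggregateService.getRowNum RPC against each
        // region hosting the table, with the per-region counts summed here.
        long rows = aggregationClient.rowCount(
            TableName.valueOf("mytable"), new LongColumnInterpreter(), scan);
        System.out.println("row count = " + rows);
      }
    }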