Return-Path: X-Original-To: archive-asf-public-internal@cust-asf2.ponee.io Delivered-To: archive-asf-public-internal@cust-asf2.ponee.io Received: from cust-asf.ponee.io (cust-asf.ponee.io [163.172.22.183]) by cust-asf2.ponee.io (Postfix) with ESMTP id D8622200CD8 for ; Wed, 2 Aug 2017 18:33:48 +0200 (CEST) Received: by cust-asf.ponee.io (Postfix) id D763F1683F9; Wed, 2 Aug 2017 16:33:48 +0000 (UTC) Delivered-To: archive-asf-public@cust-asf.ponee.io Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by cust-asf.ponee.io (Postfix) with SMTP id 8BD12168327 for ; Wed, 2 Aug 2017 18:33:46 +0200 (CEST) Received: (qmail 83924 invoked by uid 500); 2 Aug 2017 16:33:39 -0000 Mailing-List: contact commits-help@hbase.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@hbase.apache.org Delivered-To: mailing list commits@hbase.apache.org Received: (qmail 81572 invoked by uid 99); 2 Aug 2017 16:33:37 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 02 Aug 2017 16:33:37 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id 440D2F5535; Wed, 2 Aug 2017 16:33:33 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: stack@apache.org To: commits@hbase.apache.org Date: Wed, 02 Aug 2017 16:33:55 -0000 Message-Id: <5aa18bf4374643efbba7000ae5d4d40f@git.apache.org> In-Reply-To: <10a7e8e36f5f402fac5438c07862584c@git.apache.org> References: <10a7e8e36f5f402fac5438c07862584c@git.apache.org> X-Mailer: ASF-Git Admin Mailer Subject: [23/51] [partial] hbase git commit: HBASE-17056 Remove checked in PB generated files archived-at: Wed, 02 Aug 2017 16:33:49 -0000 http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java deleted file mode 100644 index 476b086..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java +++ /dev/null @@ -1,451 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *
- * Wrapper message for `int32`.
- * The JSON representation for `Int32Value` is JSON number.
- * 
- * - * Protobuf type {@code google.protobuf.Int32Value} - */ -public final class Int32Value extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.Int32Value) - Int32ValueOrBuilder { - // Use Int32Value.newBuilder() to construct. - private Int32Value(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Int32Value() { - value_ = 0; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private Int32Value( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 8: { - - value_ = input.readInt32(); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.Builder.class); - } - - public static final int VALUE_FIELD_NUMBER = 1; - private int value_; - /** - *
-   * The int32 value.
-   * 
- * - * int32 value = 1; - */ - public int getValue() { - return value_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (value_ != 0) { - output.writeInt32(1, value_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (value_ != 0) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt32Size(1, value_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value) obj; - - boolean result = true; - result = result && (getValue() - == other.getValue()); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-   * Wrapper message for `int32`.
-   * The JSON representation for `Int32Value` is JSON number.
-   * 
- * - * Protobuf type {@code google.protobuf.Int32Value} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.Int32Value) - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32ValueOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - value_ = 0; - - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value(this); - result.value_ = value_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.getDefaultInstance()) return this; - if (other.getValue() != 0) { - setValue(other.getValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private int value_ ; - /** - *
-     * The int32 value.
-     * 
- * - * int32 value = 1; - */ - public int getValue() { - return value_; - } - /** - *
-     * The int32 value.
-     * 
- * - * int32 value = 1; - */ - public Builder setValue(int value) { - - value_ = value; - onChanged(); - return this; - } - /** - *
-     * The int32 value.
-     * 
- * - * int32 value = 1; - */ - public Builder clearValue() { - - value_ = 0; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.Int32Value) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.Int32Value) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public Int32Value parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new Int32Value(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32ValueOrBuilder.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32ValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32ValueOrBuilder.java deleted file mode 100644 index f7b0b25..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32ValueOrBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public interface Int32ValueOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.Int32Value) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-   * The int32 value.
-   * 
- * - * int32 value = 1; - */ - int getValue(); -} http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java deleted file mode 100644 index fce2b73..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java +++ /dev/null @@ -1,452 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *
- * Wrapper message for `int64`.
- * The JSON representation for `Int64Value` is JSON string.
- * 
- * - * Protobuf type {@code google.protobuf.Int64Value} - */ -public final class Int64Value extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.Int64Value) - Int64ValueOrBuilder { - // Use Int64Value.newBuilder() to construct. - private Int64Value(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Int64Value() { - value_ = 0L; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private Int64Value( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 8: { - - value_ = input.readInt64(); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int64Value_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int64Value_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.Builder.class); - } - - public static final int VALUE_FIELD_NUMBER = 1; - private long value_; - /** - *
-   * The int64 value.
-   * 
- * - * int64 value = 1; - */ - public long getValue() { - return value_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (value_ != 0L) { - output.writeInt64(1, value_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (value_ != 0L) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt64Size(1, value_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value) obj; - - boolean result = true; - result = result && (getValue() - == other.getValue()); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( - getValue()); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-   * Wrapper message for `int64`.
-   * The JSON representation for `Int64Value` is JSON string.
-   * 
- * - * Protobuf type {@code google.protobuf.Int64Value} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.Int64Value) - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64ValueOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int64Value_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int64Value_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - value_ = 0L; - - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int64Value_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value(this); - result.value_ = value_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value.getDefaultInstance()) return this; - if (other.getValue() != 0L) { - setValue(other.getValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private long value_ ; - /** - *
-     * The int64 value.
-     * 
- * - * int64 value = 1; - */ - public long getValue() { - return value_; - } - /** - *
-     * The int64 value.
-     * 
- * - * int64 value = 1; - */ - public Builder setValue(long value) { - - value_ = value; - onChanged(); - return this; - } - /** - *
-     * The int64 value.
-     * 
- * - * int64 value = 1; - */ - public Builder clearValue() { - - value_ = 0L; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.Int64Value) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.Int64Value) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public Int64Value parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new Int64Value(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64ValueOrBuilder.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64ValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64ValueOrBuilder.java deleted file mode 100644 index 291f5ed..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64ValueOrBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public interface Int64ValueOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.Int64Value) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-   * The int64 value.
-   * 
- * - * int64 value = 1; - */ - long getValue(); -} http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java deleted file mode 100644 index ba8b7ec..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java +++ /dev/null @@ -1,272 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.IntList; - -import java.util.Arrays; -import java.util.Collection; -import java.util.RandomAccess; - -/** - * An implementation of {@link IntList} on top of a primitive array. - * - * @author dweis@google.com (Daniel Weis) - */ -final class IntArrayList - extends AbstractProtobufList - implements IntList, RandomAccess { - - private static final IntArrayList EMPTY_LIST = new IntArrayList(); - static { - EMPTY_LIST.makeImmutable(); - } - - public static IntArrayList emptyList() { - return EMPTY_LIST; - } - - /** - * The backing store for the list. - */ - private int[] array; - - /** - * The size of the list distinct from the length of the array. That is, it is the number of - * elements set in the list. - */ - private int size; - - /** - * Constructs a new mutable {@code IntArrayList} with default capacity. - */ - IntArrayList() { - this(new int[DEFAULT_CAPACITY], 0); - } - - /** - * Constructs a new mutable {@code IntArrayList} - * containing the same elements as {@code other}. 
- */ - private IntArrayList(int[] other, int size) { - array = other; - this.size = size; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof IntArrayList)) { - return super.equals(o); - } - IntArrayList other = (IntArrayList) o; - if (size != other.size) { - return false; - } - - final int[] arr = other.array; - for (int i = 0; i < size; i++) { - if (array[i] != arr[i]) { - return false; - } - } - - return true; - } - - @Override - public int hashCode() { - int result = 1; - for (int i = 0; i < size; i++) { - result = (31 * result) + array[i]; - } - return result; - } - - @Override - public IntList mutableCopyWithCapacity(int capacity) { - if (capacity < size) { - throw new IllegalArgumentException(); - } - return new IntArrayList(Arrays.copyOf(array, capacity), size); - } - - @Override - public Integer get(int index) { - return getInt(index); - } - - @Override - public int getInt(int index) { - ensureIndexInRange(index); - return array[index]; - } - - @Override - public int size() { - return size; - } - - @Override - public Integer set(int index, Integer element) { - return setInt(index, element); - } - - @Override - public int setInt(int index, int element) { - ensureIsMutable(); - ensureIndexInRange(index); - int previousValue = array[index]; - array[index] = element; - return previousValue; - } - - @Override - public void add(int index, Integer element) { - addInt(index, element); - } - - /** - * Like {@link #add(Integer)} but more efficient in that it doesn't box the element. - */ - @Override - public void addInt(int element) { - addInt(size, element); - } - - /** - * Like {@link #add(int, Integer)} but more efficient in that it doesn't box the element. - */ - private void addInt(int index, int element) { - ensureIsMutable(); - if (index < 0 || index > size) { - throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index)); - } - - if (size < array.length) { - // Shift everything over to make room - System.arraycopy(array, index, array, index + 1, size - index); - } else { - // Resize to 1.5x the size - int length = ((size * 3) / 2) + 1; - int[] newArray = new int[length]; - - // Copy the first part directly - System.arraycopy(array, 0, newArray, 0, index); - - // Copy the rest shifted over by one to make room - System.arraycopy(array, index, newArray, index + 1, size - index); - array = newArray; - } - - array[index] = element; - size++; - modCount++; - } - - @Override - public boolean addAll(Collection collection) { - ensureIsMutable(); - - if (collection == null) { - throw new NullPointerException(); - } - - // We specialize when adding another IntArrayList to avoid boxing elements. - if (!(collection instanceof IntArrayList)) { - return super.addAll(collection); - } - - IntArrayList list = (IntArrayList) collection; - if (list.size == 0) { - return false; - } - - int overflow = Integer.MAX_VALUE - size; - if (overflow < list.size) { - // We can't actually represent a list this large. 
- throw new OutOfMemoryError(); - } - - int newSize = size + list.size; - if (newSize > array.length) { - array = Arrays.copyOf(array, newSize); - } - - System.arraycopy(list.array, 0, array, size, list.size); - size = newSize; - modCount++; - return true; - } - - @Override - public boolean remove(Object o) { - ensureIsMutable(); - for (int i = 0; i < size; i++) { - if (o.equals(array[i])) { - System.arraycopy(array, i + 1, array, i, size - i); - size--; - modCount++; - return true; - } - } - return false; - } - - @Override - public Integer remove(int index) { - ensureIsMutable(); - ensureIndexInRange(index); - int value = array[index]; - System.arraycopy(array, index + 1, array, index, size - index); - size--; - modCount++; - return value; - } - - /** - * Ensures that the provided {@code index} is within the range of {@code [0, size]}. Throws an - * {@link IndexOutOfBoundsException} if it is not. - * - * @param index the index to verify is in range - */ - private void ensureIndexInRange(int index) { - if (index < 0 || index >= size) { - throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index)); - } - } - - private String makeOutOfBoundsExceptionMessage(int index) { - return "Index:" + index + ", Size:" + size; - } -} http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java deleted file mode 100644 index 8f5c229..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java +++ /dev/null @@ -1,751 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.IOException; -import java.lang.reflect.Method; -import java.nio.ByteBuffer; -import java.nio.charset.Charset; -import java.util.AbstractList; -import java.util.AbstractMap; -import java.util.AbstractSet; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.RandomAccess; -import java.util.Set; - -/** - * The classes contained within are used internally by the Protocol Buffer - * library and generated message implementations. They are public only because - * those generated messages do not reside in the {@code protobuf} package. - * Others should not use this class directly. - * - * @author kenton@google.com (Kenton Varda) - */ -public final class Internal { - - private Internal() {} - - static final Charset UTF_8 = Charset.forName("UTF-8"); - static final Charset ISO_8859_1 = Charset.forName("ISO-8859-1"); - - /** - * Throws an appropriate {@link NullPointerException} if the given objects is {@code null}. - */ - static T checkNotNull(T obj, String message) { - if (obj == null) { - throw new NullPointerException(message); - } - return obj; - } - - /** - * Helper called by generated code to construct default values for string - * fields. - *

- * The protocol compiler does not actually contain a UTF-8 decoder -- it - * just pushes UTF-8-encoded text around without touching it. The one place - * where this presents a problem is when generating Java string literals. - * Unicode characters in the string literal would normally need to be encoded - * using a Unicode escape sequence, which would require decoding them. - * To get around this, protoc instead embeds the UTF-8 bytes into the - * generated code and leaves it to the runtime library to decode them. - *

- * It gets worse, though. If protoc just generated a byte array, like: - * new byte[] {0x12, 0x34, 0x56, 0x78} - * Java actually generates *code* which allocates an array and then fills - * in each value. This is much less efficient than just embedding the bytes - * directly into the bytecode. To get around this, we need another - * work-around. String literals are embedded directly, so protoc actually - * generates a string literal corresponding to the bytes. The easiest way - * to do this is to use the ISO-8859-1 character set, which corresponds to - * the first 256 characters of the Unicode range. Protoc can then use - * good old CEscape to generate the string. - *

- * So we have a string literal which represents a set of bytes which - * represents another string. This function -- stringDefaultValue -- - * converts from the generated string to the string we actually want. The - * generated code calls this automatically. - */ - public static String stringDefaultValue(String bytes) { - return new String(bytes.getBytes(ISO_8859_1), UTF_8); - } - - /** - * Helper called by generated code to construct default values for bytes - * fields. - *

- * This is a lot like {@link #stringDefaultValue}, but for bytes fields. - * In this case we only need the second of the two hacks -- allowing us to - * embed raw bytes as a string literal with ISO-8859-1 encoding. - */ - public static ByteString bytesDefaultValue(String bytes) { - return ByteString.copyFrom(bytes.getBytes(ISO_8859_1)); - } - /** - * Helper called by generated code to construct default values for bytes - * fields. - *

- * This is like {@link #bytesDefaultValue}, but returns a byte array. - */ - public static byte[] byteArrayDefaultValue(String bytes) { - return bytes.getBytes(ISO_8859_1); - } - - /** - * Helper called by generated code to construct default values for bytes - * fields. - *

- * This is like {@link #bytesDefaultValue}, but returns a ByteBuffer. - */ - public static ByteBuffer byteBufferDefaultValue(String bytes) { - return ByteBuffer.wrap(byteArrayDefaultValue(bytes)); - } - - /** - * Create a new ByteBuffer and copy all the content of {@code source} - * ByteBuffer to the new ByteBuffer. The new ByteBuffer's limit and - * capacity will be source.capacity(), and its position will be 0. - * Note that the state of {@code source} ByteBuffer won't be changed. - */ - public static ByteBuffer copyByteBuffer(ByteBuffer source) { - // Make a duplicate of the source ByteBuffer and read data from the - // duplicate. This is to avoid affecting the source ByteBuffer's state. - ByteBuffer temp = source.duplicate(); - // We want to copy all the data in the source ByteBuffer, not just the - // remaining bytes. - temp.clear(); - ByteBuffer result = ByteBuffer.allocate(temp.capacity()); - result.put(temp); - result.clear(); - return result; - } - - /** - * Helper called by generated code to determine if a byte array is a valid - * UTF-8 encoded string such that the original bytes can be converted to - * a String object and then back to a byte array round tripping the bytes - * without loss. More precisely, returns {@code true} whenever: - *

   {@code
-   * Arrays.equals(byteString.toByteArray(),
-   *     new String(byteString.toByteArray(), "UTF-8").getBytes("UTF-8"))
-   * }
- * - *

This method rejects "overlong" byte sequences, as well as - * 3-byte sequences that would map to a surrogate character, in - * accordance with the restricted definition of UTF-8 introduced in - * Unicode 3.1. Note that the UTF-8 decoder included in Oracle's - * JDK has been modified to also reject "overlong" byte sequences, - * but currently (2011) still accepts 3-byte surrogate character - * byte sequences. - * - *

See the Unicode Standard,
- * Table 3-6. UTF-8 Bit Distribution,
- * Table 3-7. Well Formed UTF-8 Byte Sequences. - * - *

As of 2011-02, this method simply returns the result of {@link - * ByteString#isValidUtf8()}. Calling that method directly is preferred. - * - * @param byteString the string to check - * @return whether the byte array is round trippable - */ - public static boolean isValidUtf8(ByteString byteString) { - return byteString.isValidUtf8(); - } - - /** - * Like {@link #isValidUtf8(ByteString)} but for byte arrays. - */ - public static boolean isValidUtf8(byte[] byteArray) { - return Utf8.isValidUtf8(byteArray); - } - - /** - * Helper method to get the UTF-8 bytes of a string. - */ - public static byte[] toByteArray(String value) { - return value.getBytes(UTF_8); - } - - /** - * Helper method to convert a byte array to a string using UTF-8 encoding. - */ - public static String toStringUtf8(byte[] bytes) { - return new String(bytes, UTF_8); - } - - /** - * Interface for an enum value or value descriptor, to be used in FieldSet. - * The lite library stores enum values directly in FieldSets but the full - * library stores EnumValueDescriptors in order to better support reflection. - */ - public interface EnumLite { - int getNumber(); - } - - /** - * Interface for an object which maps integers to {@link EnumLite}s. - * {@link Descriptors.EnumDescriptor} implements this interface by mapping - * numbers to {@link Descriptors.EnumValueDescriptor}s. Additionally, - * every generated enum type has a static method internalGetValueMap() which - * returns an implementation of this type that maps numbers to enum values. - */ - public interface EnumLiteMap { - T findValueByNumber(int number); - } - - /** - * Helper method for implementing {@link Message#hashCode()} for longs. - * @see Long#hashCode() - */ - public static int hashLong(long n) { - return (int) (n ^ (n >>> 32)); - } - - /** - * Helper method for implementing {@link Message#hashCode()} for - * booleans. - * @see Boolean#hashCode() - */ - public static int hashBoolean(boolean b) { - return b ? 1231 : 1237; - } - - /** - * Helper method for implementing {@link Message#hashCode()} for enums. - *

- * This is needed because {@link java.lang.Enum#hashCode()} is final, but we - * need to use the field number as the hash code to ensure compatibility - * between statically and dynamically generated enum objects. - */ - public static int hashEnum(EnumLite e) { - return e.getNumber(); - } - - /** - * Helper method for implementing {@link Message#hashCode()} for - * enum lists. - */ - public static int hashEnumList(List list) { - int hash = 1; - for (EnumLite e : list) { - hash = 31 * hash + hashEnum(e); - } - return hash; - } - - /** - * Helper method for implementing {@link Message#equals(Object)} for bytes field. - */ - public static boolean equals(List a, List b) { - if (a.size() != b.size()) return false; - for (int i = 0; i < a.size(); ++i) { - if (!Arrays.equals(a.get(i), b.get(i))) { - return false; - } - } - return true; - } - - /** - * Helper method for implementing {@link Message#hashCode()} for bytes field. - */ - public static int hashCode(List list) { - int hash = 1; - for (byte[] bytes : list) { - hash = 31 * hash + hashCode(bytes); - } - return hash; - } - - /** - * Helper method for implementing {@link Message#hashCode()} for bytes field. - */ - public static int hashCode(byte[] bytes) { - // The hash code for a byte array should be the same as the hash code for a - // ByteString with the same content. This is to ensure that the generated - // hashCode() method will return the same value as the pure reflection - // based hashCode() method. - return Internal.hashCode(bytes, 0, bytes.length); - } - - /** - * Helper method for implementing {@link LiteralByteString#hashCode()}. - */ - static int hashCode(byte[] bytes, int offset, int length) { - // The hash code for a byte array should be the same as the hash code for a - // ByteString with the same content. This is to ensure that the generated - // hashCode() method will return the same value as the pure reflection - // based hashCode() method. - int h = Internal.partialHash(length, bytes, offset, length); - return h == 0 ? 1 : h; - } - - /** - * Helper method for continuously hashing bytes. - */ - static int partialHash(int h, byte[] bytes, int offset, int length) { - for (int i = offset; i < offset + length; i++) { - h = h * 31 + bytes[i]; - } - return h; - } - - /** - * Helper method for implementing {@link Message#equals(Object)} for bytes - * field. - */ - public static boolean equalsByteBuffer(ByteBuffer a, ByteBuffer b) { - if (a.capacity() != b.capacity()) { - return false; - } - // ByteBuffer.equals() will only compare the remaining bytes, but we want to - // compare all the content. - return a.duplicate().clear().equals(b.duplicate().clear()); - } - - /** - * Helper method for implementing {@link Message#equals(Object)} for bytes - * field. - */ - public static boolean equalsByteBuffer( - List a, List b) { - if (a.size() != b.size()) { - return false; - } - for (int i = 0; i < a.size(); ++i) { - if (!equalsByteBuffer(a.get(i), b.get(i))) { - return false; - } - } - return true; - } - - /** - * Helper method for implementing {@link Message#hashCode()} for bytes - * field. - */ - public static int hashCodeByteBuffer(List list) { - int hash = 1; - for (ByteBuffer bytes : list) { - hash = 31 * hash + hashCodeByteBuffer(bytes); - } - return hash; - } - - private static final int DEFAULT_BUFFER_SIZE = 4096; - - /** - * Helper method for implementing {@link Message#hashCode()} for bytes - * field. - */ - public static int hashCodeByteBuffer(ByteBuffer bytes) { - if (bytes.hasArray()) { - // Fast path. 
- int h = partialHash(bytes.capacity(), bytes.array(), bytes.arrayOffset(), bytes.capacity()); - return h == 0 ? 1 : h; - } else { - // Read the data into a temporary byte array before calculating the - // hash value. - final int bufferSize = bytes.capacity() > DEFAULT_BUFFER_SIZE - ? DEFAULT_BUFFER_SIZE : bytes.capacity(); - final byte[] buffer = new byte[bufferSize]; - final ByteBuffer duplicated = bytes.duplicate(); - duplicated.clear(); - int h = bytes.capacity(); - while (duplicated.remaining() > 0) { - final int length = duplicated.remaining() <= bufferSize ? - duplicated.remaining() : bufferSize; - duplicated.get(buffer, 0, length); - h = partialHash(h, buffer, 0, length); - } - return h == 0 ? 1 : h; - } - } - - @SuppressWarnings("unchecked") - public static T getDefaultInstance(Class clazz) { - try { - Method method = clazz.getMethod("getDefaultInstance"); - return (T) method.invoke(method); - } catch (Exception e) { - throw new RuntimeException( - "Failed to get default instance for " + clazz, e); - } - } - - /** - * An empty byte array constant used in generated code. - */ - public static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; - - /** - * An empty byte array constant used in generated code. - */ - public static final ByteBuffer EMPTY_BYTE_BUFFER = - ByteBuffer.wrap(EMPTY_BYTE_ARRAY); - - /** An empty coded input stream constant used in generated code. */ - public static final CodedInputStream EMPTY_CODED_INPUT_STREAM = - CodedInputStream.newInstance(EMPTY_BYTE_ARRAY); - - - /** - * Provides an immutable view of {@code List} around a {@code List}. - * - * Protobuf internal. Used in protobuf generated code only. - */ - public static class ListAdapter extends AbstractList { - /** - * Convert individual elements of the List from F to T. - */ - public interface Converter { - T convert(F from); - } - - private final List fromList; - private final Converter converter; - - public ListAdapter(List fromList, Converter converter) { - this.fromList = fromList; - this.converter = converter; - } - - @Override - public T get(int index) { - return converter.convert(fromList.get(index)); - } - - @Override - public int size() { - return fromList.size(); - } - } - - /** - * Wrap around a {@code Map} and provide a {@code Map} - * interface. - */ - public static class MapAdapter extends AbstractMap { - /** - * An interface used to convert between two types. - */ - public interface Converter { - B doForward(A object); - A doBackward(B object); - } - - public static Converter newEnumConverter( - final EnumLiteMap enumMap, final T unrecognizedValue) { - return new Converter() { - @Override - public T doForward(Integer value) { - T result = enumMap.findValueByNumber(value); - return result == null ? 
unrecognizedValue : result; - } - - @Override - public Integer doBackward(T value) { - return value.getNumber(); - } - }; - } - - private final Map realMap; - private final Converter valueConverter; - - public MapAdapter(Map realMap, - Converter valueConverter) { - this.realMap = realMap; - this.valueConverter = valueConverter; - } - - @SuppressWarnings("unchecked") - @Override - public V get(Object key) { - RealValue result = realMap.get(key); - if (result == null) { - return null; - } - return valueConverter.doForward(result); - } - - @Override - public V put(K key, V value) { - RealValue oldValue = realMap.put(key, valueConverter.doBackward(value)); - if (oldValue == null) { - return null; - } - return valueConverter.doForward(oldValue); - } - - @Override - public Set> entrySet() { - return new SetAdapter(realMap.entrySet()); - } - - private class SetAdapter extends AbstractSet> { - private final Set> realSet; - public SetAdapter(Set> realSet) { - this.realSet = realSet; - } - - @Override - public Iterator> iterator() { - return new IteratorAdapter(realSet.iterator()); - } - - @Override - public int size() { - return realSet.size(); - } - } - - private class IteratorAdapter implements Iterator> { - private final Iterator> realIterator; - - public IteratorAdapter( - Iterator> realIterator) { - this.realIterator = realIterator; - } - - @Override - public boolean hasNext() { - return realIterator.hasNext(); - } - - @Override - public java.util.Map.Entry next() { - return new EntryAdapter(realIterator.next()); - } - - @Override - public void remove() { - realIterator.remove(); - } - } - - private class EntryAdapter implements Map.Entry { - private final Map.Entry realEntry; - - public EntryAdapter(Map.Entry realEntry) { - this.realEntry = realEntry; - } - - @Override - public K getKey() { - return realEntry.getKey(); - } - - @Override - public V getValue() { - return valueConverter.doForward(realEntry.getValue()); - } - - @Override - public V setValue(V value) { - RealValue oldValue = realEntry.setValue( - valueConverter.doBackward(value)); - if (oldValue == null) { - return null; - } - return valueConverter.doForward(oldValue); - } - } - } - - /** - * Extends {@link List} to add the capability to make the list immutable and inspect if it is - * modifiable. - *

- * All implementations must support efficient random access. - */ - public static interface ProtobufList extends List, RandomAccess { - - /** - * Makes this list immutable. All subsequent modifications will throw an - * {@link UnsupportedOperationException}. - */ - void makeImmutable(); - - /** - * Returns whether this list can be modified via the publicly accessible {@link List} methods. - */ - boolean isModifiable(); - - /** - * Returns a mutable clone of this list with the specified capacity. - */ - ProtobufList mutableCopyWithCapacity(int capacity); - } - - /** - * A {@link java.util.List} implementation that avoids boxing the elements into Integers if - * possible. Does not support null elements. - */ - public static interface IntList extends ProtobufList { - - /** - * Like {@link #get(int)} but more efficient in that it doesn't box the returned value. - */ - int getInt(int index); - - /** - * Like {@link #add(Object)} but more efficient in that it doesn't box the element. - */ - void addInt(int element); - - /** - * Like {@link #set(int, Object)} but more efficient in that it doesn't box the element. - */ - int setInt(int index, int element); - - /** - * Returns a mutable clone of this list with the specified capacity. - */ - @Override - IntList mutableCopyWithCapacity(int capacity); - } - - /** - * A {@link java.util.List} implementation that avoids boxing the elements into Booleans if - * possible. Does not support null elements. - */ - public static interface BooleanList extends ProtobufList { - - /** - * Like {@link #get(int)} but more efficient in that it doesn't box the returned value. - */ - boolean getBoolean(int index); - - /** - * Like {@link #add(Object)} but more efficient in that it doesn't box the element. - */ - void addBoolean(boolean element); - - /** - * Like {@link #set(int, Object)} but more efficient in that it doesn't box the element. - */ - boolean setBoolean(int index, boolean element); - - /** - * Returns a mutable clone of this list with the specified capacity. - */ - @Override - BooleanList mutableCopyWithCapacity(int capacity); - } - - /** - * A {@link java.util.List} implementation that avoids boxing the elements into Longs if - * possible. Does not support null elements. - */ - public static interface LongList extends ProtobufList { - - /** - * Like {@link #get(int)} but more efficient in that it doesn't box the returned value. - */ - long getLong(int index); - - /** - * Like {@link #add(Object)} but more efficient in that it doesn't box the element. - */ - void addLong(long element); - - /** - * Like {@link #set(int, Object)} but more efficient in that it doesn't box the element. - */ - long setLong(int index, long element); - - /** - * Returns a mutable clone of this list with the specified capacity. - */ - @Override - LongList mutableCopyWithCapacity(int capacity); - } - - /** - * A {@link java.util.List} implementation that avoids boxing the elements into Doubles if - * possible. Does not support null elements. - */ - public static interface DoubleList extends ProtobufList { - - /** - * Like {@link #get(int)} but more efficient in that it doesn't box the returned value. - */ - double getDouble(int index); - - /** - * Like {@link #add(Object)} but more efficient in that it doesn't box the element. - */ - void addDouble(double element); - - /** - * Like {@link #set(int, Object)} but more efficient in that it doesn't box the element. 
- */ - double setDouble(int index, double element); - - /** - * Returns a mutable clone of this list with the specified capacity. - */ - @Override - DoubleList mutableCopyWithCapacity(int capacity); - } - - /** - * A {@link java.util.List} implementation that avoids boxing the elements into Floats if - * possible. Does not support null elements. - */ - public static interface FloatList extends ProtobufList { - - /** - * Like {@link #get(int)} but more efficient in that it doesn't box the returned value. - */ - float getFloat(int index); - - /** - * Like {@link #add(Object)} but more efficient in that it doesn't box the element. - */ - void addFloat(float element); - - /** - * Like {@link #set(int, Object)} but more efficient in that it doesn't box the element. - */ - float setFloat(int index, float element); - - /** - * Returns a mutable clone of this list with the specified capacity. - */ - @Override - FloatList mutableCopyWithCapacity(int capacity); - } -} http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/InvalidProtocolBufferException.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/InvalidProtocolBufferException.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/InvalidProtocolBufferException.java deleted file mode 100644 index 2682242..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/InvalidProtocolBufferException.java +++ /dev/null @@ -1,146 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.IOException; - -/** - * Thrown when a protocol message being parsed is invalid in some way, - * e.g. 
it contains a malformed varint or a negative byte length. - * - * @author kenton@google.com Kenton Varda - */ -public class InvalidProtocolBufferException extends IOException { - private static final long serialVersionUID = -1616151763072450476L; - private MessageLite unfinishedMessage = null; - - public InvalidProtocolBufferException(final String description) { - super(description); - } - - public InvalidProtocolBufferException(IOException e) { - super(e.getMessage(), e); - } - - /** - * Attaches an unfinished message to the exception to support best-effort - * parsing in {@code Parser} interface. - * - * @return this - */ - public InvalidProtocolBufferException setUnfinishedMessage( - MessageLite unfinishedMessage) { - this.unfinishedMessage = unfinishedMessage; - return this; - } - - /** - * Returns the unfinished message attached to the exception, or null if - * no message is attached. - */ - public MessageLite getUnfinishedMessage() { - return unfinishedMessage; - } - - /** - * Unwraps the underlying {@link IOException} if this exception was caused by an I/O - * problem. Otherwise, returns {@code this}. - */ - public IOException unwrapIOException() { - return getCause() instanceof IOException ? (IOException) getCause() : this; - } - - static InvalidProtocolBufferException truncatedMessage() { - return new InvalidProtocolBufferException( - "While parsing a protocol message, the input ended unexpectedly " + - "in the middle of a field. This could mean either that the " + - "input has been truncated or that an embedded message " + - "misreported its own length."); - } - - static InvalidProtocolBufferException negativeSize() { - return new InvalidProtocolBufferException( - "CodedInputStream encountered an embedded string or message " + - "which claimed to have negative size."); - } - - static InvalidProtocolBufferException malformedVarint() { - return new InvalidProtocolBufferException( - "CodedInputStream encountered a malformed varint."); - } - - static InvalidProtocolBufferException invalidTag() { - return new InvalidProtocolBufferException( - "Protocol message contained an invalid tag (zero)."); - } - - static InvalidProtocolBufferException invalidEndTag() { - return new InvalidProtocolBufferException( - "Protocol message end-group tag did not match expected tag."); - } - - static InvalidWireTypeException invalidWireType() { - return new InvalidWireTypeException( - "Protocol message tag had invalid wire type."); - } - - /** - * Exception indicating that and unexpected wire type was encountered for a field. - */ - @ExperimentalApi - public static class InvalidWireTypeException extends InvalidProtocolBufferException { - private static final long serialVersionUID = 3283890091615336259L; - - public InvalidWireTypeException(String description) { - super(description); - } - } - - static InvalidProtocolBufferException recursionLimitExceeded() { - return new InvalidProtocolBufferException( - "Protocol message had too many levels of nesting. May be malicious. " + - "Use CodedInputStream.setRecursionLimit() to increase the depth limit."); - } - - static InvalidProtocolBufferException sizeLimitExceeded() { - return new InvalidProtocolBufferException( - "Protocol message was too large. May be malicious. 
" + - "Use CodedInputStream.setSizeLimit() to increase the size limit."); - } - - static InvalidProtocolBufferException parseFailure() { - return new InvalidProtocolBufferException("Failed to parse the message."); - } - - static InvalidProtocolBufferException invalidUtf8() { - return new InvalidProtocolBufferException("Protocol message had invalid UTF-8."); - } -} http://git-wip-us.apache.org/repos/asf/hbase/blob/7a6de1bd/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyField.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyField.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyField.java deleted file mode 100644 index 886d596..0000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyField.java +++ /dev/null @@ -1,154 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.util.Iterator; -import java.util.Map.Entry; - -/** - * LazyField encapsulates the logic of lazily parsing message fields. It stores - * the message in a ByteString initially and then parse it on-demand. - * - * Most of key methods are implemented in {@link LazyFieldLite} but this class - * can contain default instance of the message to provide {@code hashCode()}, - * {@code euqals()} and {@code toString()}. - * - * @author xiangl@google.com (Xiang Li) - */ -public class LazyField extends LazyFieldLite { - - /** - * Carry a message's default instance which is used by {@code hashCode()}, {@code euqals()} and - * {@code toString()}. 
- */ - private final MessageLite defaultInstance; - - public LazyField(MessageLite defaultInstance, - ExtensionRegistryLite extensionRegistry, ByteString bytes) { - super(extensionRegistry, bytes); - - this.defaultInstance = defaultInstance; - } - - @Override - public boolean containsDefaultInstance() { - return super.containsDefaultInstance() || value == defaultInstance; - } - - public MessageLite getValue() { - return getValue(defaultInstance); - } - - @Override - public int hashCode() { - return getValue().hashCode(); - } - - @Override - public boolean equals(Object obj) { - return getValue().equals(obj); - } - - @Override - public String toString() { - return getValue().toString(); - } - - // ==================================================== - - /** - * LazyEntry and LazyIterator are used to encapsulate the LazyField, when - * users iterate all fields from FieldSet. - */ - static class LazyEntry implements Entry { - private Entry entry; - - private LazyEntry(Entry entry) { - this.entry = entry; - } - - @Override - public K getKey() { - return entry.getKey(); - } - - @Override - public Object getValue() { - LazyField field = entry.getValue(); - if (field == null) { - return null; - } - return field.getValue(); - } - - public LazyField getField() { - return entry.getValue(); - } - - @Override - public Object setValue(Object value) { - if (!(value instanceof MessageLite)) { - throw new IllegalArgumentException( - "LazyField now only used for MessageSet, " - + "and the value of MessageSet must be an instance of MessageLite"); - } - return entry.getValue().setValue((MessageLite) value); - } - } - - static class LazyIterator implements Iterator> { - private Iterator> iterator; - - public LazyIterator(Iterator> iterator) { - this.iterator = iterator; - } - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - @SuppressWarnings("unchecked") - public Entry next() { - Entry entry = iterator.next(); - if (entry.getValue() instanceof LazyField) { - return new LazyEntry((Entry) entry); - } - return (Entry) entry; - } - - @Override - public void remove() { - iterator.remove(); - } - } -}
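For readers skimming this removal, the `Internal` helpers deleted above define the hash scheme that generated messages rely on: a 31-based rolling hash over raw bytes seeded with the length, `Long.hashCode()`-style folding for longs, the 1231/1237 constants for booleans, and a remap of 0 to 1 so the result never collides with an "uncomputed" sentinel. The following is a minimal standalone sketch of that scheme, faithful to the deleted code but with illustrative class and method names; it is not the shaded `Internal` class itself.

import java.nio.charset.StandardCharsets;

/** Standalone sketch of the hash helpers removed above (illustrative names). */
public final class InternalHashSketch {

  /** Same contract as Long.hashCode(): fold the high word into the low word. */
  static int hashLong(long n) {
    return (int) (n ^ (n >>> 32));
  }

  /** Matches Boolean.hashCode(): two fixed primes keep true and false distinct. */
  static int hashBoolean(boolean b) {
    return b ? 1231 : 1237;
  }

  /** 31-based rolling hash over a byte range, seeded with h. */
  static int partialHash(int h, byte[] bytes, int offset, int length) {
    for (int i = offset; i < offset + length; i++) {
      h = h * 31 + bytes[i];
    }
    return h;
  }

  /**
   * byte[] hash that agrees with the ByteString hash for the same content:
   * seed with the length, roll over the bytes, and remap 0 to 1 as the
   * deleted helper does.
   */
  static int hashBytes(byte[] bytes) {
    int h = partialHash(bytes.length, bytes, 0, bytes.length);
    return h == 0 ? 1 : h;
  }

  public static void main(String[] args) {
    byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
    System.out.println("bytes -> " + hashBytes(payload));
    System.out.println("long  -> " + hashLong(0x1234_5678_9ABC_DEF0L));
    System.out.println("bool  -> " + hashBoolean(true));
  }
}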
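The deleted `equalsByteBuffer` and `hashCodeByteBuffer` helpers compare and hash the entire backing content of a buffer, not just the bytes between position and limit, which is why they operate on `duplicate().clear()` rather than on the buffer directly. A small sketch of that distinction, again with illustrative names:

import java.nio.ByteBuffer;

/** Sketch of the full-content ByteBuffer comparison used by the deleted helpers. */
public final class ByteBufferEqualitySketch {

  /** Compare all bytes of both buffers, regardless of their current position/limit. */
  static boolean equalsFullContent(ByteBuffer a, ByteBuffer b) {
    if (a.capacity() != b.capacity()) {
      return false;
    }
    // ByteBuffer.equals() only looks at the remaining bytes; duplicating and
    // clearing resets position/limit on the copies without touching the originals.
    return a.duplicate().clear().equals(b.duplicate().clear());
  }

  public static void main(String[] args) {
    ByteBuffer x = ByteBuffer.wrap(new byte[] {1, 2, 3, 4});
    ByteBuffer y = ByteBuffer.wrap(new byte[] {1, 2, 3, 4});
    x.position(2); // partially consume x

    System.out.println("equals() on remaining bytes: " + x.equals(y));             // false
    System.out.println("full-content comparison:     " + equalsFullContent(x, y)); // true
  }
}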
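Note that the archive rendering has stripped the generic type parameters from the deleted adapter classes (`ListAdapter`, `MapAdapter`, `EnumLiteMap`, and friends). The sketch below reconstructs the `ListAdapter` idea as a self-contained class under that assumption: a read-only view that converts each element on access instead of copying the backing list. Names here are illustrative, not the shaded classes.

import java.util.AbstractList;
import java.util.Arrays;
import java.util.List;

/** Reconstructed sketch of the ListAdapter idea: a read-only converting view. */
public final class ListAdapterSketch {

  /** Converts individual elements from F to T on access. */
  interface Converter<F, T> {
    T convert(F from);
  }

  /** Presents a List<F> as an unmodifiable List<T>; elements are converted lazily in get(). */
  static final class ListAdapter<F, T> extends AbstractList<T> {
    private final List<F> fromList;
    private final Converter<F, T> converter;

    ListAdapter(List<F> fromList, Converter<F, T> converter) {
      this.fromList = fromList;
      this.converter = converter;
    }

    @Override
    public T get(int index) {
      return converter.convert(fromList.get(index));
    }

    @Override
    public int size() {
      return fromList.size();
    }
  }

  public static void main(String[] args) {
    // View a list of field numbers as their hex representations without copying it.
    List<Integer> numbers = Arrays.asList(1, 2, 10, 255);
    List<String> hex = new ListAdapter<>(numbers, n -> Integer.toHexString(n));
    System.out.println(hex); // [1, 2, a, ff]
  }
}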