hbase-commits mailing list archives

From st...@apache.org
Subject [34/51] [partial] hbase git commit: Revert "HBASE-17056 Remove checked in PB generated files Selective add of dependency on" Revert for now. Build unstable and some interesting issues around CLASSPATH
Date Fri, 07 Jul 2017 05:01:08 GMT
http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java
new file mode 100644
index 0000000..0468e6c
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java
@@ -0,0 +1,39141 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/descriptor.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public final class DescriptorProtos {
+  private DescriptorProtos() {}
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  public interface FileDescriptorSetOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:google.protobuf.FileDescriptorSet)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> 
+        getFileList();
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index);
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    int getFileCount();
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> 
+        getFileOrBuilderList();
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder(
+        int index);
+  }
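+  // Illustrative note (added for readers; not protoc output):
+  // FileDescriptorSetOrBuilder is implemented by both the immutable
+  // FileDescriptorSet message below and its Builder, so read-only code can
+  // accept either form, e.g.:
+  //
+  //   static int countFiles(DescriptorProtos.FileDescriptorSetOrBuilder s) {
+  //     return s.getFileCount();
+  //   }
+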
+  /**
+   * <pre>
+   * The protocol compiler can output a FileDescriptorSet containing the .proto
+   * files it parses.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.FileDescriptorSet}
+   */
+  public  static final class FileDescriptorSet extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:google.protobuf.FileDescriptorSet)
+      FileDescriptorSetOrBuilder {
+    // Use FileDescriptorSet.newBuilder() to construct.
+    private FileDescriptorSet(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private FileDescriptorSet() {
+      file_ = java.util.Collections.emptyList();
+    }
+
+    @java.lang.Override
+    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private FileDescriptorSet(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      int mutable_bitField0_ = 0;
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                file_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              file_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          file_ = java.util.Collections.unmodifiableList(file_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
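+    // Illustrative note (not protoc output): a protobuf wire tag is
+    // (field_number << 3) | wire_type, so case 10 above is field 1 ("file")
+    // with wire type 2 (length-delimited message), and case 0 means
+    // readTag() reached the end of the input.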
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder.class);
+    }
+
+    public static final int FILE_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> file_;
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> getFileList() {
+      return file_;
+    }
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> 
+        getFileOrBuilderList() {
+      return file_;
+    }
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    public int getFileCount() {
+      return file_.size();
+    }
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index) {
+      return file_.get(index);
+    }
+    /**
+     * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder(
+        int index) {
+      return file_.get(index);
+    }
+
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      for (int i = 0; i < getFileCount(); i++) {
+        if (!getFile(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      for (int i = 0; i < file_.size(); i++) {
+        output.writeMessage(1, file_.get(i));
+      }
+      unknownFields.writeTo(output);
+    }
+
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      for (int i = 0; i < file_.size(); i++) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, file_.get(i));
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) obj;
+
+      boolean result = true;
+      result = result && getFileList()
+          .equals(other.getFileList());
+      result = result && unknownFields.equals(other.unknownFields);
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (getFileCount() > 0) {
+        hash = (37 * hash) + FILE_FIELD_NUMBER;
+        hash = (53 * hash) + getFileList().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(byte[] data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
+        byte[] data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseDelimitedFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * <pre>
+     * The protocol compiler can output a FileDescriptorSet containing the .proto
+     * files it parses.
+     * </pre>
+     *
+     * Protobuf type {@code google.protobuf.FileDescriptorSet}
+     */
+    public static final class Builder extends
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:google.protobuf.FileDescriptorSet)
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSetOrBuilder {
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor;
+      }
+
+      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+          getFileFieldBuilder();
+        }
+      }
+      public Builder clear() {
+        super.clear();
+        if (fileBuilder_ == null) {
+          file_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          fileBuilder_.clear();
+        }
+        return this;
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet build() {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet buildPartial() {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet(this);
+        int from_bitField0_ = bitField0_;
+        if (fileBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            file_ = java.util.Collections.unmodifiableList(file_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.file_ = file_;
+        } else {
+          result.file_ = fileBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) {
+          return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet other) {
+        if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.getDefaultInstance()) return this;
+        if (fileBuilder_ == null) {
+          if (!other.file_.isEmpty()) {
+            if (file_.isEmpty()) {
+              file_ = other.file_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureFileIsMutable();
+              file_.addAll(other.file_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.file_.isEmpty()) {
+            if (fileBuilder_.isEmpty()) {
+              fileBuilder_.dispose();
+              fileBuilder_ = null;
+              file_ = other.file_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+              fileBuilder_ = 
+                org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
+                   getFileFieldBuilder() : null;
+            } else {
+              fileBuilder_.addAllMessages(other.file_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        for (int i = 0; i < getFileCount(); i++) {
+          if (!getFile(i).isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
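+      // Illustrative note (not protoc output): the finally block above merges
+      // whatever was parsed before a failure, so a partial message survives an
+      // InvalidProtocolBufferException; unwrapIOException() rethrows the
+      // underlying IOException when the parse error wrapped one.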
+      private int bitField0_;
+
+      private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> file_ =
+        java.util.Collections.emptyList();
+      private void ensureFileIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          file_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>(file_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> fileBuilder_;
+
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> getFileList() {
+        if (fileBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(file_);
+        } else {
+          return fileBuilder_.getMessageList();
+        }
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public int getFileCount() {
+        if (fileBuilder_ == null) {
+          return file_.size();
+        } else {
+          return fileBuilder_.getCount();
+        }
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index) {
+        if (fileBuilder_ == null) {
+          return file_.get(index);
+        } else {
+          return fileBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder setFile(
+          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) {
+        if (fileBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureFileIsMutable();
+          file_.set(index, value);
+          onChanged();
+        } else {
+          fileBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder setFile(
+          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) {
+        if (fileBuilder_ == null) {
+          ensureFileIsMutable();
+          file_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          fileBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder addFile(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) {
+        if (fileBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureFileIsMutable();
+          file_.add(value);
+          onChanged();
+        } else {
+          fileBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder addFile(
+          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) {
+        if (fileBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureFileIsMutable();
+          file_.add(index, value);
+          onChanged();
+        } else {
+          fileBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder addFile(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) {
+        if (fileBuilder_ == null) {
+          ensureFileIsMutable();
+          file_.add(builderForValue.build());
+          onChanged();
+        } else {
+          fileBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder addFile(
+          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) {
+        if (fileBuilder_ == null) {
+          ensureFileIsMutable();
+          file_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          fileBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder addAllFile(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> values) {
+        if (fileBuilder_ == null) {
+          ensureFileIsMutable();
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, file_);
+          onChanged();
+        } else {
+          fileBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder clearFile() {
+        if (fileBuilder_ == null) {
+          file_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          fileBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public Builder removeFile(int index) {
+        if (fileBuilder_ == null) {
+          ensureFileIsMutable();
+          file_.remove(index);
+          onChanged();
+        } else {
+          fileBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder getFileBuilder(
+          int index) {
+        return getFileFieldBuilder().getBuilder(index);
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder(
+          int index) {
+        if (fileBuilder_ == null) {
+          return file_.get(index);
+        } else {
+          return fileBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> 
+           getFileOrBuilderList() {
+        if (fileBuilder_ != null) {
+          return fileBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(file_);
+        }
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder addFileBuilder() {
+        return getFileFieldBuilder().addBuilder(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder addFileBuilder(
+          int index) {
+        return getFileFieldBuilder().addBuilder(
+            index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code>
+       */
+      public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder> 
+           getFileBuilderList() {
+        return getFileFieldBuilder().getBuilderList();
+      }
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> 
+          getFileFieldBuilder() {
+        if (fileBuilder_ == null) {
+          fileBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>(
+                  file_,
+                  ((bitField0_ & 0x00000001) == 0x00000001),
+                  getParentForChildren(),
+                  isClean());
+          file_ = null;
+        }
+        return fileBuilder_;
+      }
+      public final Builder setUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:google.protobuf.FileDescriptorSet)
+    }
+
+    // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
+    private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorSet>
+        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FileDescriptorSet>() {
+      public FileDescriptorSet parsePartialFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+          return new FileDescriptorSet(input, extensionRegistry);
+      }
+    };
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorSet> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorSet> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
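+
+  // Illustrative sketch (comments only; not protoc output). A
+  // FileDescriptorSet is what `protoc --descriptor_set_out=out.desc` writes,
+  // so, assuming such a file exists, it can be read back with the parseFrom
+  // overloads generated above:
+  //
+  //   try (java.io.InputStream in = new java.io.FileInputStream("out.desc")) {
+  //     DescriptorProtos.FileDescriptorSet set =
+  //         DescriptorProtos.FileDescriptorSet.parseFrom(in);
+  //     for (DescriptorProtos.FileDescriptorProto f : set.getFileList()) {
+  //       System.out.println(f.getName());
+  //     }
+  //   }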
+
+  public interface FileDescriptorProtoOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:google.protobuf.FileDescriptorProto)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <pre>
+     * file name, relative to root of source tree
+     * </pre>
+     *
+     * <code>optional string name = 1;</code>
+     */
+    boolean hasName();
+    /**
+     * <pre>
+     * file name, relative to root of source tree
+     * </pre>
+     *
+     * <code>optional string name = 1;</code>
+     */
+    java.lang.String getName();
+    /**
+     * <pre>
+     * file name, relative to root of source tree
+     * </pre>
+     *
+     * <code>optional string name = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getNameBytes();
+
+    /**
+     * <pre>
+     * e.g. "foo", "foo.bar", etc.
+     * </pre>
+     *
+     * <code>optional string package = 2;</code>
+     */
+    boolean hasPackage();
+    /**
+     * <pre>
+     * e.g. "foo", "foo.bar", etc.
+     * </pre>
+     *
+     * <code>optional string package = 2;</code>
+     */
+    java.lang.String getPackage();
+    /**
+     * <pre>
+     * e.g. "foo", "foo.bar", etc.
+     * </pre>
+     *
+     * <code>optional string package = 2;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getPackageBytes();
+
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    java.util.List<java.lang.String>
+        getDependencyList();
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    int getDependencyCount();
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    java.lang.String getDependency(int index);
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getDependencyBytes(int index);
+
+    /**
+     * <pre>
+     * Indexes of the public imported files in the dependency list above.
+     * </pre>
+     *
+     * <code>repeated int32 public_dependency = 10;</code>
+     */
+    java.util.List<java.lang.Integer> getPublicDependencyList();
+    /**
+     * <pre>
+     * Indexes of the public imported files in the dependency list above.
+     * </pre>
+     *
+     * <code>repeated int32 public_dependency = 10;</code>
+     */
+    int getPublicDependencyCount();
+    /**
+     * <pre>
+     * Indexes of the public imported files in the dependency list above.
+     * </pre>
+     *
+     * <code>repeated int32 public_dependency = 10;</code>
+     */
+    int getPublicDependency(int index);
+
+    /**
+     * <pre>
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     * </pre>
+     *
+     * <code>repeated int32 weak_dependency = 11;</code>
+     */
+    java.util.List<java.lang.Integer> getWeakDependencyList();
+    /**
+     * <pre>
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     * </pre>
+     *
+     * <code>repeated int32 weak_dependency = 11;</code>
+     */
+    int getWeakDependencyCount();
+    /**
+     * <pre>
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     * </pre>
+     *
+     * <code>repeated int32 weak_dependency = 11;</code>
+     */
+    int getWeakDependency(int index);
+
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> 
+        getMessageTypeList();
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index);
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    int getMessageTypeCount();
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> 
+        getMessageTypeOrBuilderList();
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder(
+        int index);
+
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> 
+        getEnumTypeList();
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index);
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    int getEnumTypeCount();
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> 
+        getEnumTypeOrBuilderList();
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder(
+        int index);
+
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> 
+        getServiceList();
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index);
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    int getServiceCount();
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> 
+        getServiceOrBuilderList();
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder(
+        int index);
+
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> 
+        getExtensionList();
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index);
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    int getExtensionCount();
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> 
+        getExtensionOrBuilderList();
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder(
+        int index);
+
+    /**
+     * <code>optional .google.protobuf.FileOptions options = 8;</code>
+     */
+    boolean hasOptions();
+    /**
+     * <code>optional .google.protobuf.FileOptions options = 8;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions();
+    /**
+     * <code>optional .google.protobuf.FileOptions options = 8;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder();
+
+    /**
+     * <pre>
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     * </pre>
+     *
+     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
+     */
+    boolean hasSourceCodeInfo();
+    /**
+     * <pre>
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     * </pre>
+     *
+     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo();
+    /**
+     * <pre>
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     * </pre>
+     *
+     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder();
+
+    /**
+     * <pre>
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     * </pre>
+     *
+     * <code>optional string syntax = 12;</code>
+     */
+    boolean hasSyntax();
+    /**
+     * <pre>
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     * </pre>
+     *
+     * <code>optional string syntax = 12;</code>
+     */
+    java.lang.String getSyntax();
+    /**
+     * <pre>
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     * </pre>
+     *
+     * <code>optional string syntax = 12;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getSyntaxBytes();
+  }
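+  // Illustrative sketch (comments only; not protoc output; assumes the usual
+  // generated Builder setters such as setName and setPackage, defined further
+  // down in this file): building a FileDescriptorProto by hand:
+  //
+  //   DescriptorProtos.FileDescriptorProto proto =
+  //       DescriptorProtos.FileDescriptorProto.newBuilder()
+  //           .setName("google/protobuf/example.proto")
+  //           .setPackage("google.protobuf")
+  //           .build();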
+  /**
+   * <pre>
+   * Describes a complete .proto file.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.FileDescriptorProto}
+   */
+  public  static final class FileDescriptorProto extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:google.protobuf.FileDescriptorProto)
+      FileDescriptorProtoOrBuilder {
+    // Use FileDescriptorProto.newBuilder() to construct.
+    private FileDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private FileDescriptorProto() {
+      name_ = "";
+      package_ = "";
+      dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
+      publicDependency_ = java.util.Collections.emptyList();
+      weakDependency_ = java.util.Collections.emptyList();
+      messageType_ = java.util.Collections.emptyList();
+      enumType_ = java.util.Collections.emptyList();
+      service_ = java.util.Collections.emptyList();
+      extension_ = java.util.Collections.emptyList();
+      syntax_ = "";
+    }
+
+    @java.lang.Override
+    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private FileDescriptorProto(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      int mutable_bitField0_ = 0;
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000001;
+              name_ = bs;
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000002;
+              package_ = bs;
+              break;
+            }
+            case 26: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+                dependency_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList();
+                mutable_bitField0_ |= 0x00000004;
+              }
+              dependency_.add(bs);
+              break;
+            }
+            case 34: {
+              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
+                messageType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto>();
+                mutable_bitField0_ |= 0x00000020;
+              }
+              messageType_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.PARSER, extensionRegistry));
+              break;
+            }
+            case 42: {
+              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
+                enumType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto>();
+                mutable_bitField0_ |= 0x00000040;
+              }
+              enumType_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.PARSER, extensionRegistry));
+              break;
+            }
+            case 50: {
+              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+                service_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto>();
+                mutable_bitField0_ |= 0x00000080;
+              }
+              service_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.PARSER, extensionRegistry));
+              break;
+            }
+            case 58: {
+              if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
+                extension_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>();
+                mutable_bitField0_ |= 0x00000100;
+              }
+              extension_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry));
+              break;
+            }
+            case 66: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000004) == 0x00000004)) {
+                subBuilder = options_.toBuilder();
+              }
+              options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(options_);
+                options_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000004;
+              break;
+            }
+            case 74: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000008) == 0x00000008)) {
+                subBuilder = sourceCodeInfo_.toBuilder();
+              }
+              sourceCodeInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(sourceCodeInfo_);
+                sourceCodeInfo_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000008;
+              break;
+            }
+            case 80: {
+              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+                publicDependency_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000008;
+              }
+              publicDependency_.add(input.readInt32());
+              break;
+            }
+            case 82: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) {
+                publicDependency_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000008;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                publicDependency_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+            case 88: {
+              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
+                weakDependency_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000010;
+              }
+              weakDependency_.add(input.readInt32());
+              break;
+            }
+            case 90: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010) && input.getBytesUntilLimit() > 0) {
+                weakDependency_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000010;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                weakDependency_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+            case 98: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000010;
+              syntax_ = bs;
+              break;
+            }
+          }
+        }
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+          dependency_ = dependency_.getUnmodifiableView();
+        }
+        if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
+          messageType_ = java.util.Collections.unmodifiableList(messageType_);
+        }
+        if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
+          enumType_ = java.util.Collections.unmodifiableList(enumType_);
+        }
+        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+          service_ = java.util.Collections.unmodifiableList(service_);
+        }
+        if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
+          extension_ = java.util.Collections.unmodifiableList(extension_);
+        }
+        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+          publicDependency_ = java.util.Collections.unmodifiableList(publicDependency_);
+        }
+        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
+          weakDependency_ = java.util.Collections.unmodifiableList(weakDependency_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder.class);
+    }
+
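+    // Presence of each optional field is tracked in one bit of bitField0_:
+    // name = 0x1, package = 0x2, options = 0x4, source_code_info = 0x8,
+    // syntax = 0x10. Repeated fields have no presence bit.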
+    private int bitField0_;
+    public static final int NAME_FIELD_NUMBER = 1;
+    private volatile java.lang.Object name_;
+    /**
+     * <pre>
+     * file name, relative to root of source tree
+     * </pre>
+     *
+     * <code>optional string name = 1;</code>
+     */
+    public boolean hasName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <pre>
+     * file name, relative to root of source tree
+     * </pre>
+     *
+     * <code>optional string name = 1;</code>
+     */
+    public java.lang.String getName() {
+      java.lang.Object ref = name_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
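+        // name_ holds a ByteString until first read; decode it here and
+        // cache the String only when the bytes are valid UTF-8.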
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          name_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <pre>
+     * file name, relative to root of source tree
+     * </pre>
+     *
+     * <code>optional string name = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getNameBytes() {
+      java.lang.Object ref = name_;
+      if (ref instanceof java.lang.String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        name_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int PACKAGE_FIELD_NUMBER = 2;
+    private volatile java.lang.Object package_;
+    /**
+     * <pre>
+     * e.g. "foo", "foo.bar", etc.
+     * </pre>
+     *
+     * <code>optional string package = 2;</code>
+     */
+    public boolean hasPackage() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <pre>
+     * e.g. "foo", "foo.bar", etc.
+     * </pre>
+     *
+     * <code>optional string package = 2;</code>
+     */
+    public java.lang.String getPackage() {
+      java.lang.Object ref = package_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          package_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <pre>
+     * e.g. "foo", "foo.bar", etc.
+     * </pre>
+     *
+     * <code>optional string package = 2;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getPackageBytes() {
+      java.lang.Object ref = package_;
+      if (ref instanceof java.lang.String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        package_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int DEPENDENCY_FIELD_NUMBER = 3;
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList dependency_;
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList
+        getDependencyList() {
+      return dependency_;
+    }
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    public int getDependencyCount() {
+      return dependency_.size();
+    }
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    public java.lang.String getDependency(int index) {
+      return dependency_.get(index);
+    }
+    /**
+     * <pre>
+     * Names of files imported by this file.
+     * </pre>
+     *
+     * <code>repeated string dependency = 3;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getDependencyBytes(int index) {
+      return dependency_.getByteString(index);
+    }
+
+    public static final int PUBLIC_DEPENDENCY_FIELD_NUMBER = 10;
+    private java.util.List<java.lang.Integer> publicDependency_;
+    /**
+     * <pre>
+     * Indexes of the public imported files in the dependency list above.
+     * </pre>
+     *
+     * <code>repeated int32 public_dependency = 10;</code>
+     */
+    public java.util.List<java.lang.Integer>
+        getPublicDependencyList() {
+      return publicDependency_;
+    }
+    /**
+     * <pre>
+     * Indexes of the public imported files in the dependency list above.
+     * </pre>
+     *
+     * <code>repeated int32 public_dependency = 10;</code>
+     */
+    public int getPublicDependencyCount() {
+      return publicDependency_.size();
+    }
+    /**
+     * <pre>
+     * Indexes of the public imported files in the dependency list above.
+     * </pre>
+     *
+     * <code>repeated int32 public_dependency = 10;</code>
+     */
+    public int getPublicDependency(int index) {
+      return publicDependency_.get(index);
+    }
+
+    public static final int WEAK_DEPENDENCY_FIELD_NUMBER = 11;
+    private java.util.List<java.lang.Integer> weakDependency_;
+    /**
+     * <pre>
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     * </pre>
+     *
+     * <code>repeated int32 weak_dependency = 11;</code>
+     */
+    public java.util.List<java.lang.Integer>
+        getWeakDependencyList() {
+      return weakDependency_;
+    }
+    /**
+     * <pre>
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     * </pre>
+     *
+     * <code>repeated int32 weak_dependency = 11;</code>
+     */
+    public int getWeakDependencyCount() {
+      return weakDependency_.size();
+    }
+    /**
+     * <pre>
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     * </pre>
+     *
+     * <code>repeated int32 weak_dependency = 11;</code>
+     */
+    public int getWeakDependency(int index) {
+      return weakDependency_.get(index);
+    }
+
+    public static final int MESSAGE_TYPE_FIELD_NUMBER = 4;
+    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> messageType_;
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> getMessageTypeList() {
+      return messageType_;
+    }
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> 
+        getMessageTypeOrBuilderList() {
+      return messageType_;
+    }
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    public int getMessageTypeCount() {
+      return messageType_.size();
+    }
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index) {
+      return messageType_.get(index);
+    }
+    /**
+     * <pre>
+     * All top-level definitions in this file.
+     * </pre>
+     *
+     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder(
+        int index) {
+      return messageType_.get(index);
+    }
+
+    public static final int ENUM_TYPE_FIELD_NUMBER = 5;
+    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> enumType_;
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> getEnumTypeList() {
+      return enumType_;
+    }
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> 
+        getEnumTypeOrBuilderList() {
+      return enumType_;
+    }
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    public int getEnumTypeCount() {
+      return enumType_.size();
+    }
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) {
+      return enumType_.get(index);
+    }
+    /**
+     * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder(
+        int index) {
+      return enumType_.get(index);
+    }
+
+    public static final int SERVICE_FIELD_NUMBER = 6;
+    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> service_;
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> getServiceList() {
+      return service_;
+    }
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> 
+        getServiceOrBuilderList() {
+      return service_;
+    }
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    public int getServiceCount() {
+      return service_.size();
+    }
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index) {
+      return service_.get(index);
+    }
+    /**
+     * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder(
+        int index) {
+      return service_.get(index);
+    }
+
+    public static final int EXTENSION_FIELD_NUMBER = 7;
+    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> extension_;
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getExtensionList() {
+      return extension_;
+    }
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> 
+        getExtensionOrBuilderList() {
+      return extension_;
+    }
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    public int getExtensionCount() {
+      return extension_.size();
+    }
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) {
+      return extension_.get(index);
+    }
+    /**
+     * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder(
+        int index) {
+      return extension_.get(index);
+    }
+
+    public static final int OPTIONS_FIELD_NUMBER = 8;
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions options_;
+    /**
+     * <code>optional .google.protobuf.FileOptions options = 8;</code>
+     */
+    public boolean hasOptions() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional .google.protobuf.FileOptions options = 8;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions() {
+      return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_;
+    }
+    /**
+     * <code>optional .google.protobuf.FileOptions options = 8;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder() {
+      return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_;
+    }
+
+    public static final int SOURCE_CODE_INFO_FIELD_NUMBER = 9;
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo sourceCodeInfo_;
+    /**
+     * <pre>
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     * </pre>
+     *
+     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
+     */
+    public boolean hasSourceCodeInfo() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <pre>
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     * </pre>
+     *
+     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo() {
+      return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_;
+    }
+    /**
+     * <pre>
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     * </pre>
+     *
+     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder() {
+      return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_;
+    }
+
+    public static final int SYNTAX_FIELD_NUMBER = 12;
+    private volatile java.lang.Object syntax_;
+    /**
+     * <pre>
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     * </pre>
+     *
+     * <code>optional string syntax = 12;</code>
+     */
+    public boolean hasSyntax() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <pre>
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     * </pre>
+     *
+     * <code>optional string syntax = 12;</code>
+     */
+    public java.lang.String getSyntax() {
+      java.lang.Object ref = syntax_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          syntax_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <pre>
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     * </pre>
+     *
+     * <code>optional string syntax = 12;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getSyntaxBytes() {
+      java.lang.Object ref = syntax_;
+      if (ref instanceof java.lang.String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        syntax_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+
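+    // Memoized initialization check: -1 = not yet computed, 0 = some nested
+    // message is missing required fields, 1 = fully initialized.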
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      for (int i = 0; i < getMessageTypeCount(); i++) {
+        if (!getMessageType(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      for (int i = 0; i < getEnumTypeCount(); i++) {
+        if (!getEnumType(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      for (int i = 0; i < getServiceCount(); i++) {
+        if (!getService(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      for (int i = 0; i < getExtensionCount(); i++) {
+        if (!getExtension(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      if (hasOptions()) {
+        if (!getOptions().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
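+    // Serialization writes set fields in ascending field-number order,
+    // consulting bitField0_ for the presence of optional fields.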
+    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, package_);
+      }
+      for (int i = 0; i < dependency_.size(); i++) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, dependency_.getRaw(i));
+      }
+      for (int i = 0; i < messageType_.size(); i++) {
+        output.writeMessage(4, messageType_.get(i));
+      }
+      for (int i = 0; i < enumType_.size(); i++) {
+        output.writeMessage(5, enumType_.get(i));
+      }
+      for (int i = 0; i < service_.size(); i++) {
+        output.writeMessage(6, service_.get(i));
+      }
+      for (int i = 0; i < extension_.size(); i++) {
+        output.writeMessage(7, extension_.get(i));
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeMessage(8, getOptions());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeMessage(9, getSourceCodeInfo());
+      }
+      for (int i = 0; i < publicDependency_.size(); i++) {
+        output.writeInt32(10, publicDependency_.get(i));
+      }
+      for (int i = 0; i < weakDependency_.size(); i++) {
+        output.writeInt32(11, weakDependency_.get(i));
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 12, syntax_);
+      }
+      unknownFields.writeTo(output);
+    }
+
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, package_);
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < dependency_.size(); i++) {
+          dataSize += computeStringSizeNoTag(dependency_.getRaw(i));
+        }
+        size += dataSize;
+        size += 1 * getDependencyList().size();
+      }
+      for (int i = 0; i < messageType_.size(); i++) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(4, messageType_.get(i));
+      }
+      for (int i = 0; i < enumType_.size(); i++) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(5, enumType_.get(i));
+      }
+      for (int i = 0; i < service_.size(); i++) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(6, service_.get(i));
+      }
+      for (int i = 0; i < extension_.size(); i++) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(7, extension_.get(i));
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(8, getOptions());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(9, getSourceCodeInfo());
+      }
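+      // Fields 10 and 11 are serialized unpacked, so each element carries
+      // its own one-byte tag in addition to its varint payload.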
+      {
+        int dataSize = 0;
+        for (int i = 0; i < publicDependency_.size(); i++) {
+          dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(publicDependency_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getPublicDependencyList().size();
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < weakDependency_.size(); i++) {
+          dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(weakDependency_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getWeakDependencyList().size();
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(12, syntax_);
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
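+    // Two messages are equal when each field agrees on presence and value,
+    // including any unknown fields carried through parsing.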
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) obj;
+
+      boolean result = true;
+      result = result && (hasName() == other.hasName());
+      if (hasName()) {
+        result = result && getName()
+            .equals(other.getName());
+      }
+      result = result && (hasPackage() == other.hasPackage());
+      if (hasPackage()) {
+        result = result && getPackage()
+            .equals(other.getPackage());
+      }
+      result = result && getDependencyList()
+          .equals(other.getDependencyList());
+      result = result && getPublicDependencyList()
+          .equals(other.getPublicDependencyList());
+      result = result && getWeakDependencyList()
+          .equals(other.getWeakDependencyList());
+      result = result && getMessageTypeList()
+          .equals(other.getMessageTypeList());
+      result = result && getEnumTypeList()
+          .equals(other.getEnumTypeList());
+      result = result && getServiceList()
+          .equals(other.getServiceList());
+      result = result && getExtensionList()
+          .equals(other.getExtensionList());
+      result = result && (hasOptions() == other.hasOptions());
+      if (hasOptions()) {
+        result = result && getOptions()
+            .equals(other.getOptions());
+      }
+      result = result && (hasSourceCodeInfo() == other.hasSourceCodeInfo());
+      if (hasSourceCodeInfo()) {
+        result = result && getSourceCodeInfo()
+            .equals(other.getSourceCodeInfo());
+      }
+      result = result && (hasSyntax() == other.hasSyntax());
+      if (hasSyntax()) {
+        result = result && getSyntax()
+            .equals(other.getSyntax());
+      }
+      result = result && unknownFields.equals(other.unknownFields);
+      return result;
+    }
+
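+    // The hash is memoized; it seeds with the descriptor's hash and folds in
+    // the field number and value hash of every field that is set.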
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasName()) {
+        hash = (37 * hash) + NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getName().hashCode();
+      }
+      if (hasPackage()) {
+        hash = (37 * hash) + PACKAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getPackage().hashCode();
+      }
+      if (getDependencyCount() > 0) {
+        hash = (37 * hash) + DEPENDENCY_FIELD_NUMBER;
+        hash = (53 * hash) + getDependencyList().hashCode();
+      }
+      if (getPublicDependencyCount() > 0) {
+        hash = (37 * hash) + PUBLIC_DEPENDENCY_FIELD_NUMBER;
+        hash = (53 * hash) + getPublicDependencyList().hashCode();
+      }
+      if (getWeakDependencyCount() > 0) {
+        hash = (37 * hash) + WEAK_DEPENDENCY_FIELD_NUMBER;
+        hash = (53 * hash) + getWeakDependencyList().hashCode();
+      }
+      if (getMessageTypeCount() > 0) {
+        hash = (37 * hash) + MESSAGE_TYPE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessageTypeList().hashCode();
+      }
+      if (getEnumTypeCount() > 0) {
+        hash = (37 * hash) + ENUM_TYPE_FIELD_NUMBER;
+        hash = (53 * hash) + getEnumTypeList().hashCode();
+      }
+      if (getServiceCount() > 0) {
+        hash = (37 * hash) + SERVICE_FIELD_NUMBER;
+        hash = (53 * hash) + getServiceList().hashCode();
+      }
+      if (getExtensionCount() > 0) {
+        hash = (37 * hash) + EXTENSION_FIELD_NUMBER;
+        hash = (53 * hash) + getExtensionList().hashCode();
+      }
+      if (hasOptions()) {
+        hash = (37 * hash) + OPTIONS_FIELD_NUMBER;
+        hash = (53 * hash) + getOptions().hashCode();
+      }
+      if (hasSourceCodeInfo()) {
+        hash = (37 * hash) + SOURCE_CODE_INFO_FIELD_NUMBER;
+        hash = (53 * hash) + getSourceCodeInfo().hashCode();
+      }
+      if (hasSyntax()) {
+        hash = (37 * hash) + SYNTAX_FIELD_NUMBER;
+        hash = (53 * hash) + getSyntax().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
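+    // Typical round trip (a sketch, not part of the generated API surface):
+    //   byte[] bytes = proto.toByteArray();
+    //   FileDescriptorProto copy = FileDescriptorProto.parseFrom(bytes);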
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(byte[] data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(
+        byte[] data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseDelimitedFrom(jav

<TRUNCATED>
