hbase-commits mailing list archives

From: st...@apache.org
Subject: hbase git commit: Revert "HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when DATA_BLOCK_ENCODING set to DIFF" Revert of premature push.
Date: Thu, 10 Nov 2016 01:08:33 GMT
Repository: hbase
Updated Branches:
  refs/heads/master 1462cf77e -> 8192a6b6e


Revert "HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when
DATA_BLOCK_ENCODING set to DIFF"
Revert of premature push.

This reverts commit de3a51263d16b14c334b44306751b16b59173b86.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8192a6b6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8192a6b6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8192a6b6

Branch: refs/heads/master
Commit: 8192a6b6ee16676662ca05aa83e6eea62da08533
Parents: 1462cf7
Author: Michael Stack <stack@apache.org>
Authored: Wed Nov 9 17:08:14 2016 -0800
Committer: Michael Stack <stack@apache.org>
Committed: Wed Nov 9 17:08:14 2016 -0800

----------------------------------------------------------------------
 .../protobuf/generated/BucketCacheProtos.java   | 565 -------------------
 .../src/main/protobuf/BucketCache.proto         |  33 --
 hbase-server/pom.xml                            |   4 -
 .../hbase/io/hfile/bucket/BucketCache.java      |  81 +--
 .../hbase/io/hfile/bucket/TestBucketCache.java  |  45 +-
 5 files changed, 43 insertions(+), 685 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/8192a6b6/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BucketCacheProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BucketCacheProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BucketCacheProtos.java
deleted file mode 100644
index d86cd38..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BucketCacheProtos.java
+++ /dev/null
@@ -1,565 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: BucketCache.proto
-
-package org.apache.hadoop.hbase.shaded.protobuf.generated;
-
-public final class BucketCacheProtos {
-  private BucketCacheProtos() {}
-  public static void registerAllExtensions(
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
-  }
-
-  public static void registerAllExtensions(
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
-    registerAllExtensions(
-        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
-  }
-  public interface PersistedCacheMetadataOrBuilder extends
-      // @@protoc_insertion_point(interface_extends:hbase.pb.PersistedCacheMetadata)
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
-    /**
-     * <pre>
-     * Set version to be zero
-     * </pre>
-     *
-     * <code>optional uint32 version = 1 [default = 0];</code>
-     */
-    boolean hasVersion();
-    /**
-     * <pre>
-     * Set version to be zero
-     * </pre>
-     *
-     * <code>optional uint32 version = 1 [default = 0];</code>
-     */
-    int getVersion();
-  }
-  /**
-   * <pre>
-   **
-   * Metadata written out as preamble when we persist cache content.
-   * </pre>
-   *
-   * Protobuf type {@code hbase.pb.PersistedCacheMetadata}
-   */
-  public  static final class PersistedCacheMetadata extends
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
-      // @@protoc_insertion_point(message_implements:hbase.pb.PersistedCacheMetadata)
-      PersistedCacheMetadataOrBuilder {
-    // Use PersistedCacheMetadata.newBuilder() to construct.
-    private PersistedCacheMetadata(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
-      super(builder);
-    }
-    private PersistedCacheMetadata() {
-      version_ = 0;
-    }
-
-    @java.lang.Override
-    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
-    getUnknownFields() {
-      return this.unknownFields;
-    }
-    private PersistedCacheMetadata(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      this();
-      int mutable_bitField0_ = 0;
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              version_ = input.readUInt32();
-              break;
-            }
-          }
-        }
-      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
-            e).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_PersistedCacheMetadata_descriptor;
-    }
-
-    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_PersistedCacheMetadata_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.Builder.class);
-    }
-
-    private int bitField0_;
-    public static final int VERSION_FIELD_NUMBER = 1;
-    private int version_;
-    /**
-     * <pre>
-     * Set version to be zero
-     * </pre>
-     *
-     * <code>optional uint32 version = 1 [default = 0];</code>
-     */
-    public boolean hasVersion() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <pre>
-     * Set version to be zero
-     * </pre>
-     *
-     * <code>optional uint32 version = 1 [default = 0];</code>
-     */
-    public int getVersion() {
-      return version_;
-    }
-
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized == 1) return true;
-      if (isInitialized == 0) return false;
-
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeUInt32(1, version_);
-      }
-      unknownFields.writeTo(output);
-    }
-
-    public int getSerializedSize() {
-      int size = memoizedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeUInt32Size(1, version_);
-      }
-      size += unknownFields.getSerializedSize();
-      memoizedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata) obj;
-
-      boolean result = true;
-      result = result && (hasVersion() == other.hasVersion());
-      if (hasVersion()) {
-        result = result && (getVersion()
-            == other.getVersion());
-      }
-      result = result && unknownFields.equals(other.unknownFields);
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasVersion()) {
-        hash = (37 * hash) + VERSION_FIELD_NUMBER;
-        hash = (53 * hash) + getVersion();
-      }
-      hash = (29 * hash) + unknownFields.hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(byte[] data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(
-        byte[] data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseDelimitedFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder() {
-      return DEFAULT_INSTANCE.toBuilder();
-    }
-    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata prototype) {
-      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() {
-      return this == DEFAULT_INSTANCE
-          ? new Builder() : new Builder().mergeFrom(this);
-    }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * <pre>
-     **
-     * Metadata written out as preamble when we persist cache content.
-     * </pre>
-     *
-     * Protobuf type {@code hbase.pb.PersistedCacheMetadata}
-     */
-    public static final class Builder extends
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
-        // @@protoc_insertion_point(builder_implements:hbase.pb.PersistedCacheMetadata)
-        org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadataOrBuilder {
-      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_PersistedCacheMetadata_descriptor;
-      }
-
-      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_PersistedCacheMetadata_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-                .alwaysUseFieldBuilders) {
-        }
-      }
-      public Builder clear() {
-        super.clear();
-        version_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        return this;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_PersistedCacheMetadata_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata build() {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata buildPartial() {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.version_ = version_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder clone() {
-        return (Builder) super.clone();
-      }
-      public Builder setField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.setField(field, value);
-      }
-      public Builder clearField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
-        return (Builder) super.clearField(field);
-      }
-      public Builder clearOneof(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
-        return (Builder) super.clearOneof(oneof);
-      }
-      public Builder setRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          int index, Object value) {
-        return (Builder) super.setRepeatedField(field, index, value);
-      }
-      public Builder addRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.addRepeatedField(field, value);
-      }
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata) {
-          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata other) {
-        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata.getDefaultInstance()) return this;
-        if (other.hasVersion()) {
-          setVersion(other.getVersion());
-        }
-        this.mergeUnknownFields(other.unknownFields);
-        onChanged();
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        return true;
-      }
-
-      public Builder mergeFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata) e.getUnfinishedMessage();
-          throw e.unwrapIOException();
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      private int version_ ;
-      /**
-       * <pre>
-       * Set version to be zero
-       * </pre>
-       *
-       * <code>optional uint32 version = 1 [default = 0];</code>
-       */
-      public boolean hasVersion() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <pre>
-       * Set version to be zero
-       * </pre>
-       *
-       * <code>optional uint32 version = 1 [default = 0];</code>
-       */
-      public int getVersion() {
-        return version_;
-      }
-      /**
-       * <pre>
-       * Set version to be zero
-       * </pre>
-       *
-       * <code>optional uint32 version = 1 [default = 0];</code>
-       */
-      public Builder setVersion(int value) {
-        bitField0_ |= 0x00000001;
-        version_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * Set version to be zero
-       * </pre>
-       *
-       * <code>optional uint32 version = 1 [default = 0];</code>
-       */
-      public Builder clearVersion() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        version_ = 0;
-        onChanged();
-        return this;
-      }
-      public final Builder setUnknownFields(
-          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
-        return super.setUnknownFields(unknownFields);
-      }
-
-      public final Builder mergeUnknownFields(
-          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
-        return super.mergeUnknownFields(unknownFields);
-      }
-
-
-      // @@protoc_insertion_point(builder_scope:hbase.pb.PersistedCacheMetadata)
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.PersistedCacheMetadata)
-    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata DEFAULT_INSTANCE;
-    static {
-      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata();
-    }
-
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata getDefaultInstance() {
-      return DEFAULT_INSTANCE;
-    }
-
-    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PersistedCacheMetadata>
-        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<PersistedCacheMetadata>() {
-      public PersistedCacheMetadata parsePartialFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-          return new PersistedCacheMetadata(input, extensionRegistry);
-      }
-    };
-
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PersistedCacheMetadata> parser() {
-      return PARSER;
-    }
-
-    @java.lang.Override
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PersistedCacheMetadata> getParserForType() {
-      return PARSER;
-    }
-
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.PersistedCacheMetadata getDefaultInstanceForType() {
-      return DEFAULT_INSTANCE;
-    }
-
-  }
-
-  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-    internal_static_hbase_pb_PersistedCacheMetadata_descriptor;
-  private static final 
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-      internal_static_hbase_pb_PersistedCacheMetadata_fieldAccessorTable;
-
-  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static  org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\021BucketCache.proto\022\010hbase.pb\",\n\026Persist" +
-      "edCacheMetadata\022\022\n\007version\030\001 \001(\r:\0010BK\n1o" +
-      "rg.apache.hadoop.hbase.shaded.protobuf.g" +
-      "eneratedB\021BucketCacheProtosH\001\240\001\001"
-    };
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
-            descriptor = root;
-            return null;
-          }
-        };
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
-        }, assigner);
-    internal_static_hbase_pb_PersistedCacheMetadata_descriptor =
-      getDescriptor().getMessageTypes().get(0);
-    internal_static_hbase_pb_PersistedCacheMetadata_fieldAccessorTable = new
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-        internal_static_hbase_pb_PersistedCacheMetadata_descriptor,
-        new java.lang.String[] { "Version", });
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/8192a6b6/hbase-protocol-shaded/src/main/protobuf/BucketCache.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/BucketCache.proto b/hbase-protocol-shaded/src/main/protobuf/BucketCache.proto
deleted file mode 100644
index ebde19a..0000000
--- a/hbase-protocol-shaded/src/main/protobuf/BucketCache.proto
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// This file contains protocol buffers that are shared throughout HBase
-package hbase.pb;
-
-option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated";
-option java_outer_classname = "BucketCacheProtos";
-option java_generate_equals_and_hash = true;
-option optimize_for = SPEED;
-
-/**
- * Metadata written out as preamble when we persist cache content.
- */
-message PersistedCacheMetadata {
-  // Set version to be zero
-  optional uint32 version = 1 [default = 0];
-}
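
As context for the message deleted above: it was written as a small preamble ahead of the serialized cache index. Below is a minimal write-side sketch in Java, reconstructed from the persistToFile() lines removed from BucketCache.java further down; the wrapper class name is invented for illustration, and the generated BucketCacheProtos class no longer exists once this revert lands.

  import java.io.FileOutputStream;
  import java.io.IOException;

  import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;

  public class PersistPreambleSketch {
    // Version of the persisted format this writer emits.
    static final int PERSISTED_CACHE_VERSION = 0;

    static void writePreamble(FileOutputStream fos) throws IOException {
      // Magic bytes first, so a reader can cheaply reject old-format files.
      fos.write(ProtobufMagic.PB_MAGIC, 0, ProtobufMagic.lengthOfPBMagic());
      // Then a small length-delimited metadata message; fields can be added
      // to PersistedCacheMetadata later without breaking old readers.
      BucketCacheProtos.PersistedCacheMetadata metadata =
          BucketCacheProtos.PersistedCacheMetadata.newBuilder()
              .setVersion(PERSISTED_CACHE_VERSION).build();
      metadata.writeDelimitedTo(fos);
    }
  }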

http://git-wip-us.apache.org/repos/asf/hbase/blob/8192a6b6/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 2140a43..0bdee40 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -354,10 +354,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-protocol-shaded</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/8192a6b6/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 7033b96..a36423e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -53,7 +53,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -69,9 +68,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
 import org.apache.hadoop.hbase.io.hfile.CachedBlock;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 import org.apache.hadoop.hbase.nio.ByteBuff;
-import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.IdReadWriteLock;
@@ -252,11 +248,9 @@ public class BucketCache implements BlockCache, HeapSize {
       try {
         retrieveFromFile(bucketSizes);
       } catch (IOException ioex) {
-        LOG.error("Can't restore cache from persisted file " + persistencePath +
-          "; file removed; cache is cold!", ioex);
+        LOG.error("Can't restore from file because of", ioex);
       } catch (ClassNotFoundException cnfe) {
-        LOG.error("Can't restore cache from persisted file in rebuild "
-            + "because can't deserialise; file removed; cache is cold!", cnfe);
+        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);
         throw new RuntimeException(cnfe);
       }
     }
@@ -951,11 +945,6 @@ public class BucketCache implements BlockCache, HeapSize {
     return receptacle;
   }
 
-  /**
-   * The current version of the persisted cache file.
-   */
-  private static final int PERSISTED_CACHE_VERSION = 0;
-
   private void persistToFile() throws IOException {
     assert !cacheEnabled;
     FileOutputStream fos = null;
@@ -965,13 +954,6 @@ public class BucketCache implements BlockCache, HeapSize {
         throw new IOException("Attempt to persist non-persistent cache mappings!");
       }
       fos = new FileOutputStream(persistencePath, false);
-      // Write out a metdata protobuf block in case we change format at later date, etc.
-      // Add our magic as preamble.
-      fos.write(ProtobufMagic.PB_MAGIC, 0, ProtobufMagic.lengthOfPBMagic());
-      BucketCacheProtos.PersistedCacheMetadata metadata =
-          BucketCacheProtos.PersistedCacheMetadata.newBuilder().
-          setVersion(PERSISTED_CACHE_VERSION).build();
-      metadata.writeDelimitedTo(fos);
       oos = new ObjectOutputStream(fos);
       oos.writeLong(cacheCapacity);
       oos.writeUTF(ioEngine.getClass().getName());
@@ -984,12 +966,9 @@ public class BucketCache implements BlockCache, HeapSize {
     }
   }
 
-  /**
-   * @see #persistToFile()
-   */
   @SuppressWarnings("unchecked")
-  private void retrieveFromFile(int[] bucketSizes)
-  throws IOException, BucketAllocatorException, ClassNotFoundException {
+  private void retrieveFromFile(int[] bucketSizes) throws IOException, BucketAllocatorException,
+      ClassNotFoundException {
     File persistenceFile = new File(persistencePath);
     if (!persistenceFile.exists()) {
       return;
@@ -998,35 +977,10 @@ public class BucketCache implements BlockCache, HeapSize {
     FileInputStream fis = null;
     ObjectInputStream ois = null;
     try {
-      if (!ioEngine.isPersistent()) {
-        throw new IOException("Attempt to restore non-persistent cache mappings!");
-      }
+      if (!ioEngine.isPersistent())
+        throw new IOException(
+            "Attempt to restore non-persistent cache mappings!");
       fis = new FileInputStream(persistencePath);
-      // Read protobuf magic and then metadata. See persistToFile for where we wrote
-      // out metadata for format.
-      byte [] pbmagic = new byte [ProtobufMagic.lengthOfPBMagic()];
-      int len = fis.read(pbmagic, 0, pbmagic.length);
-      if (len != pbmagic.length || !ProtobufMagic.isPBMagicPrefix(pbmagic)) {
-        // Throw exception. In finally we remove the file ALWAYS.
-        throw new HBaseIOException("Failed read of protobuf magic ("
-            + Bytes.toString(pbmagic)+ "); old format (HBASE-16993)? "
-            + "Failed read of persisted cache file=" + persistencePath);
-      }
-      BucketCacheProtos.PersistedCacheMetadata metadata = null;
-      try {
-        metadata =
-            BucketCacheProtos.PersistedCacheMetadata.parseDelimitedFrom(fis);
-      } catch (IOException e) {
-        // Throw exception if failed parse. In finally we remove the
-        throw new HBaseIOException("Failed read of persisted cache metadata file=" +
-            persistencePath, e);
-      }
-      if (metadata.getVersion() != PERSISTED_CACHE_VERSION) {
-        throw new HBaseIOException("Unexpected version of persisted cache metadata file="
+
-            persistencePath + "; expected=" + PERSISTED_CACHE_VERSION + " but read=" +
-            metadata.getVersion());
-      }
-      // Ok. Read metadata. All seems good. Go ahead and pull in the persisted cache.
       ois = new ObjectInputStream(fis);
       long capacitySize = ois.readLong();
       if (capacitySize != cacheCapacity)
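
For contrast, the read side that the hunk above removes rejects the file unless it starts with the protobuf magic, then parses the delimited metadata and checks the version. A self-contained sketch reconstructed from those deleted lines (method and class names here are illustrative only):

  import java.io.FileInputStream;
  import java.io.IOException;

  import org.apache.hadoop.hbase.HBaseIOException;
  import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;
  import org.apache.hadoop.hbase.util.Bytes;

  class RetrievePreambleSketch {
    static final int PERSISTED_CACHE_VERSION = 0;

    static void checkPreamble(FileInputStream fis, String persistencePath)
        throws IOException {
      // Old-format files (pre HBASE-16993) do not start with the magic.
      byte[] pbmagic = new byte[ProtobufMagic.lengthOfPBMagic()];
      int len = fis.read(pbmagic, 0, pbmagic.length);
      if (len != pbmagic.length || !ProtobufMagic.isPBMagicPrefix(pbmagic)) {
        throw new HBaseIOException("Failed read of protobuf magic (" +
            Bytes.toString(pbmagic) + "); old format? file=" + persistencePath);
      }
      // Parse the delimited metadata and verify a version we know how to read.
      BucketCacheProtos.PersistedCacheMetadata metadata =
          BucketCacheProtos.PersistedCacheMetadata.parseDelimitedFrom(fis);
      if (metadata == null || metadata.getVersion() != PERSISTED_CACHE_VERSION) {
        throw new HBaseIOException("Unexpected version of persisted cache " +
            "metadata file=" + persistencePath);
      }
    }
  }
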
@@ -1056,8 +1010,6 @@ public class BucketCache implements BlockCache, HeapSize {
       if (!persistenceFile.delete()) {
         throw new IOException("Failed deleting persistence file "
             + persistenceFile.getAbsolutePath());
-      } else {
-        LOG.info("Deleted persisted cache file " + persistencePath);
       }
     }
   }
@@ -1178,7 +1130,10 @@ public class BucketCache implements BlockCache, HeapSize {
   /**
    * Item in cache. We expect this to be where most memory goes. Java uses 8
    * bytes just for object headers; after this, we want to use as little as
-   * possible.
+   * possible - so we only use 8 bytes, but in order to do so we end up messing
+   * around with all this Java casting stuff. Offset stored as 5 bytes that make
+   * up the long. Doubt we'll see devices this big for ages. Offsets are divided
+   * by 256. So 5 bytes gives us 256TB or so.
    */
   static class BucketEntry implements Serializable {
     private static final long serialVersionUID = -6741504807982257534L;
@@ -1192,14 +1147,15 @@ public class BucketCache implements BlockCache, HeapSize {
       }
     };
 
+    private int offsetBase;
     private int length;
+    private byte offset1;
     byte deserialiserIndex;
     private volatile long accessCounter;
     private BlockPriority priority;
     // Set this when we were not able to forcefully evict the block
     private volatile boolean markedForEvict;
     private AtomicInteger refCount = new AtomicInteger(0);
-    private long offset;
 
     /**
     * Time this block was cached.  Presumes we are created just before we are added to the cache.
@@ -1217,12 +1173,17 @@ public class BucketCache implements BlockCache, HeapSize {
       }
     }
 
-    long offset() {
-      return this.offset;
+    long offset() { // Java has no unsigned numbers
+      long o = ((long) offsetBase) & 0xFFFFFFFF;
+      o += (((long) (offset1)) & 0xFF) << 32;
+      return o << 8;
     }
 
     private void setOffset(long value) {
-      this.offset = value;
+      assert (value & 0xFF) == 0;
+      value >>= 8;
+      offsetBase = (int) value;
+      offset1 = (byte) (value >> 32);
     }
 
     public int getLength() {
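
The hunks above restore the compact five-byte offset encoding in BucketEntry. As a worked example of the arithmetic: offsets are multiples of 256, so the low 8 bits are always zero; shifting right by 8 leaves 40 significant bits, which fit in an int (low 32 bits) plus a byte (high 8 bits), and 2^40 * 256 = 2^48 bytes, about 256TB of addressable cache. A standalone sketch follows (class name invented; the mask is written with an explicit long literal):

  class OffsetPackingSketch {
    private int offsetBase; // low 32 bits of (offset >> 8)
    private byte offset1;   // high 8 bits of (offset >> 8)

    long offset() { // Java has no unsigned types, hence the masking
      long o = ((long) offsetBase) & 0xFFFFFFFFL;
      o += (((long) offset1) & 0xFF) << 32;
      return o << 8;
    }

    void setOffset(long value) {
      assert (value & 0xFF) == 0; // bucket offsets are 256-byte aligned
      value >>= 8;
      offsetBase = (int) value;
      offset1 = (byte) (value >> 32);
    }
  }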

http://git-wip-us.apache.org/repos/asf/hbase/blob/8192a6b6/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index 976066d..6fe352d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -18,7 +18,8 @@
  */
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -59,11 +60,12 @@ public class TestBucketCache {
   @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}")
   public static Iterable<Object[]> data() {
     return Arrays.asList(new Object[][] {
-      {44600, null}, // Random size here and below to demo no need for multiple of 256 (HBASE-16993)
-      {16 * 1024,
-         new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 15000, 46000, 49152, 51200, 8 * 1024 + 1024,
-            16 * 1024 + 1024, 28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024,
-            128 * 1024 + 1024 } } });
+        { 8192, null }, // TODO: why is 8k the default blocksize for these tests?
+        {
+            16 * 1024,
+            new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024,
+                28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024,
+                128 * 1024 + 1024 } } });
   }
 
   @Parameterized.Parameter(0)
@@ -227,11 +229,10 @@ public class TestBucketCache {
     HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
     Path testDir = TEST_UTIL.getDataTestDir();
     TEST_UTIL.getTestFileSystem().mkdirs(testDir);
-    Path persistFile = new Path(testDir, "bucketcache.persist.file");
-    String persistFileStr = persistFile.toString();
-    String engine = "file:/" + persistFile.toString();
-    BucketCache bucketCache = new BucketCache(engine, capacitySize,
-        constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistFileStr);
+
+    BucketCache bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
+        constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, testDir
+            + "/bucket.persistence");
     long usedSize = bucketCache.getAllocator().getUsedSize();
     assertTrue(usedSize == 0);
 
@@ -245,28 +246,26 @@ public class TestBucketCache {
     }
     usedSize = bucketCache.getAllocator().getUsedSize();
     assertTrue(usedSize != 0);
-    // Persist cache to file
+    // persist cache to file
     bucketCache.shutdown();
 
-    // Restore cache from file
-    bucketCache = new BucketCache(engine, capacitySize, constructedBlockSize,
-        constructedBlockSizes, writeThreads, writerQLen, persistFileStr);
+    // restore cache from file
+    bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
+        constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, testDir
+            + "/bucket.persistence");
     assertEquals(usedSize, bucketCache.getAllocator().getUsedSize());
-    // Assert file is no longer present.
-    assertFalse(TEST_UTIL.getTestFileSystem().exists(persistFile));
     // persist cache to file
     bucketCache.shutdown();
 
     // reconfig buckets sizes, the biggest bucket is small than constructedBlockSize (8k or 16k)
     // so it can't restore cache from file
-    // Throw in a few random sizes to demo don't have to be multiple of 256 (HBASE-16993)
-    int[] smallBucketSizes = new int[] {2 * 1024 + 1024, 3600, 4 * 1024 + 1024, 47000 };
-    bucketCache = new BucketCache(engine, capacitySize, constructedBlockSize, smallBucketSizes,
-        writeThreads, writerQLen, persistFileStr);
+    int[] smallBucketSizes = new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024 };
+    bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
+        constructedBlockSize, smallBucketSizes, writeThreads,
+        writerQLen, testDir + "/bucket.persistence");
     assertEquals(0, bucketCache.getAllocator().getUsedSize());
     assertEquals(0, bucketCache.backingMap.size());
-    // The above should have failed reading he cache file and then cleaned it up.
-    assertFalse(TEST_UTIL.getTestFileSystem().exists(persistFile));
+
     TEST_UTIL.cleanupTestDir();
   }
 }

