hbase-commits mailing list archives

From: ser...@apache.org
Subject: svn commit: r1477762 [4/6] - in /hbase/branches/0.95: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/ hbase-common/src/main/resources/ hbase-protocol/src/main/java/org/apache/hadoop...
Date: Tue, 30 Apr 2013 18:34:59 GMT
Added: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java?rev=1477762&view=auto
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java (added)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java Tue Apr 30 18:34:58 2013
@@ -0,0 +1,3019 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: WAL.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class WALProtos {
+  private WALProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public enum ScopeType
+      implements com.google.protobuf.ProtocolMessageEnum {
+    REPLICATION_SCOPE_LOCAL(0, 0),
+    REPLICATION_SCOPE_GLOBAL(1, 1),
+    ;
+    
+    public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0;
+    public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1;
+    
+    
+    public final int getNumber() { return value; }
+    
+    public static ScopeType valueOf(int value) {
+      switch (value) {
+        case 0: return REPLICATION_SCOPE_LOCAL;
+        case 1: return REPLICATION_SCOPE_GLOBAL;
+        default: return null;
+      }
+    }
+    
+    public static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
+        internalGetValueMap() {
+      return internalValueMap;
+    }
+    private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
+        internalValueMap =
+          new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
+            public ScopeType findValueByNumber(int number) {
+              return ScopeType.valueOf(number);
+            }
+          };
+    
+    public final com.google.protobuf.Descriptors.EnumValueDescriptor
+        getValueDescriptor() {
+      return getDescriptor().getValues().get(index);
+    }
+    public final com.google.protobuf.Descriptors.EnumDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+    public static final com.google.protobuf.Descriptors.EnumDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0);
+    }
+    
+    private static final ScopeType[] VALUES = {
+      REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, 
+    };
+    
+    public static ScopeType valueOf(
+        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+      if (desc.getType() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "EnumValueDescriptor is not for this type.");
+      }
+      return VALUES[desc.getIndex()];
+    }
+    
+    private final int index;
+    private final int value;
+    
+    private ScopeType(int index, int value) {
+      this.index = index;
+      this.value = value;
+    }
+    
+    // @@protoc_insertion_point(enum_scope:ScopeType)
+  }
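
[Reviewer aside, not part of the patch: a minimal sketch of how callers map between wire values and the generated ScopeType constants. Per the switch in valueOf(int) above, unknown numbers yield null.]

    WALProtos.ScopeType scope = WALProtos.ScopeType.valueOf(1);   // REPLICATION_SCOPE_GLOBAL
    int wire = scope.getNumber();                                 // 1
    WALProtos.ScopeType unknown = WALProtos.ScopeType.valueOf(2); // null: no such scope defined
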
+  
+  public interface WALHeaderOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // optional bool hasCompression = 1;
+    boolean hasHasCompression();
+    boolean getHasCompression();
+  }
+  public static final class WALHeader extends
+      com.google.protobuf.GeneratedMessage
+      implements WALHeaderOrBuilder {
+    // Use WALHeader.newBuilder() to construct.
+    private WALHeader(Builder builder) {
+      super(builder);
+    }
+    private WALHeader(boolean noInit) {}
+    
+    private static final WALHeader defaultInstance;
+    public static WALHeader getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public WALHeader getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // optional bool hasCompression = 1;
+    public static final int HASCOMPRESSION_FIELD_NUMBER = 1;
+    private boolean hasCompression_;
+    public boolean hasHasCompression() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public boolean getHasCompression() {
+      return hasCompression_;
+    }
+    
+    private void initFields() {
+      hasCompression_ = false;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBool(1, hasCompression_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(1, hasCompression_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) obj;
+      
+      boolean result = true;
+      result = result && (hasHasCompression() == other.hasHasCompression());
+      if (hasHasCompression()) {
+        result = result && (getHasCompression()
+            == other.getHasCompression());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasHasCompression()) {
+        hash = (37 * hash) + HASCOMPRESSION_FIELD_NUMBER;
+        hash = (53 * hash) + hashBoolean(getHasCompression());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeaderOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        hasCompression_ = false;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader build() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.hasCompression_ = hasCompression_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this;
+        if (other.hasHasCompression()) {
+          setHasCompression(other.getHasCompression());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              hasCompression_ = input.readBool();
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // optional bool hasCompression = 1;
+      private boolean hasCompression_ ;
+      public boolean hasHasCompression() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public boolean getHasCompression() {
+        return hasCompression_;
+      }
+      public Builder setHasCompression(boolean value) {
+        bitField0_ |= 0x00000001;
+        hasCompression_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearHasCompression() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        hasCompression_ = false;
+        onChanged();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:WALHeader)
+    }
+    
+    static {
+      defaultInstance = new WALHeader(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:WALHeader)
+  }
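
[Reviewer aside, not part of the patch: a minimal round-trip sketch of the WALHeader API defined above. Only toByteArray() is assumed here, from the protobuf 2.x AbstractMessageLite base class; the builder, setter, and parseFrom(byte[]) are all in this file.]

    WALProtos.WALHeader header = WALProtos.WALHeader.newBuilder()
        .setHasCompression(true)
        .build();
    byte[] bytes = header.toByteArray();   // serialize via writeTo/getSerializedSize above
    WALProtos.WALHeader parsed = WALProtos.WALHeader.parseFrom(bytes);
    assert parsed.hasHasCompression() && parsed.getHasCompression();
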
+  
+  public interface WALKeyOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // required bytes encodedRegionName = 1;
+    boolean hasEncodedRegionName();
+    com.google.protobuf.ByteString getEncodedRegionName();
+    
+    // required bytes tableName = 2;
+    boolean hasTableName();
+    com.google.protobuf.ByteString getTableName();
+    
+    // required uint64 logSequenceNumber = 3;
+    boolean hasLogSequenceNumber();
+    long getLogSequenceNumber();
+    
+    // required uint64 writeTime = 4;
+    boolean hasWriteTime();
+    long getWriteTime();
+    
+    // optional .UUID clusterId = 5;
+    boolean hasClusterId();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder();
+    
+    // repeated .FamilyScope scopes = 6;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> 
+        getScopesList();
+    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index);
+    int getScopesCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
+        getScopesOrBuilderList();
+    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
+        int index);
+    
+    // optional uint32 followingKvCount = 7;
+    boolean hasFollowingKvCount();
+    int getFollowingKvCount();
+  }
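
[Reviewer aside, not part of the patch: a sketch of populating the four required WALKey fields through the builder that follows. Per isInitialized(), build() throws an UninitializedMessageException if any required field is unset; the "region"/"table" values are placeholders.]

    WALProtos.WALKey key = WALProtos.WALKey.newBuilder()
        .setEncodedRegionName(com.google.protobuf.ByteString.copyFromUtf8("region"))
        .setTableName(com.google.protobuf.ByteString.copyFromUtf8("table"))
        .setLogSequenceNumber(1L)
        .setWriteTime(System.currentTimeMillis())
        .build();   // throws if a required field is missing
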
+  public static final class WALKey extends
+      com.google.protobuf.GeneratedMessage
+      implements WALKeyOrBuilder {
+    // Use WALKey.newBuilder() to construct.
+    private WALKey(Builder builder) {
+      super(builder);
+    }
+    private WALKey(boolean noInit) {}
+    
+    private static final WALKey defaultInstance;
+    public static WALKey getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public WALKey getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // required bytes encodedRegionName = 1;
+    public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1;
+    private com.google.protobuf.ByteString encodedRegionName_;
+    public boolean hasEncodedRegionName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public com.google.protobuf.ByteString getEncodedRegionName() {
+      return encodedRegionName_;
+    }
+    
+    // required bytes tableName = 2;
+    public static final int TABLENAME_FIELD_NUMBER = 2;
+    private com.google.protobuf.ByteString tableName_;
+    public boolean hasTableName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public com.google.protobuf.ByteString getTableName() {
+      return tableName_;
+    }
+    
+    // required uint64 logSequenceNumber = 3;
+    public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3;
+    private long logSequenceNumber_;
+    public boolean hasLogSequenceNumber() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    public long getLogSequenceNumber() {
+      return logSequenceNumber_;
+    }
+    
+    // required uint64 writeTime = 4;
+    public static final int WRITETIME_FIELD_NUMBER = 4;
+    private long writeTime_;
+    public boolean hasWriteTime() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    public long getWriteTime() {
+      return writeTime_;
+    }
+    
+    // optional .UUID clusterId = 5;
+    public static final int CLUSTERID_FIELD_NUMBER = 5;
+    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_;
+    public boolean hasClusterId() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() {
+      return clusterId_;
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
+      return clusterId_;
+    }
+    
+    // repeated .FamilyScope scopes = 6;
+    public static final int SCOPES_FIELD_NUMBER = 6;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_;
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
+      return scopes_;
+    }
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
+        getScopesOrBuilderList() {
+      return scopes_;
+    }
+    public int getScopesCount() {
+      return scopes_.size();
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
+      return scopes_.get(index);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
+        int index) {
+      return scopes_.get(index);
+    }
+    
+    // optional uint32 followingKvCount = 7;
+    public static final int FOLLOWINGKVCOUNT_FIELD_NUMBER = 7;
+    private int followingKvCount_;
+    public boolean hasFollowingKvCount() {
+      return ((bitField0_ & 0x00000020) == 0x00000020);
+    }
+    public int getFollowingKvCount() {
+      return followingKvCount_;
+    }
+    
+    private void initFields() {
+      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+      tableName_ = com.google.protobuf.ByteString.EMPTY;
+      logSequenceNumber_ = 0L;
+      writeTime_ = 0L;
+      clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+      scopes_ = java.util.Collections.emptyList();
+      followingKvCount_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      if (!hasEncodedRegionName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasTableName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasLogSequenceNumber()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasWriteTime()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (hasClusterId()) {
+        if (!getClusterId().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      for (int i = 0; i < getScopesCount(); i++) {
+        if (!getScopes(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, encodedRegionName_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, tableName_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeUInt64(3, logSequenceNumber_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeUInt64(4, writeTime_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeMessage(5, clusterId_);
+      }
+      for (int i = 0; i < scopes_.size(); i++) {
+        output.writeMessage(6, scopes_.get(i));
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        output.writeUInt32(7, followingKvCount_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, encodedRegionName_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, tableName_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt64Size(3, logSequenceNumber_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt64Size(4, writeTime_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(5, clusterId_);
+      }
+      for (int i = 0; i < scopes_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(6, scopes_.get(i));
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(7, followingKvCount_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) obj;
+      
+      boolean result = true;
+      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
+      if (hasEncodedRegionName()) {
+        result = result && getEncodedRegionName()
+            .equals(other.getEncodedRegionName());
+      }
+      result = result && (hasTableName() == other.hasTableName());
+      if (hasTableName()) {
+        result = result && getTableName()
+            .equals(other.getTableName());
+      }
+      result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
+      if (hasLogSequenceNumber()) {
+        result = result && (getLogSequenceNumber()
+            == other.getLogSequenceNumber());
+      }
+      result = result && (hasWriteTime() == other.hasWriteTime());
+      if (hasWriteTime()) {
+        result = result && (getWriteTime()
+            == other.getWriteTime());
+      }
+      result = result && (hasClusterId() == other.hasClusterId());
+      if (hasClusterId()) {
+        result = result && getClusterId()
+            .equals(other.getClusterId());
+      }
+      result = result && getScopesList()
+          .equals(other.getScopesList());
+      result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
+      if (hasFollowingKvCount()) {
+        result = result && (getFollowingKvCount()
+            == other.getFollowingKvCount());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasEncodedRegionName()) {
+        hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER;
+        hash = (53 * hash) + getEncodedRegionName().hashCode();
+      }
+      if (hasTableName()) {
+        hash = (37 * hash) + TABLENAME_FIELD_NUMBER;
+        hash = (53 * hash) + getTableName().hashCode();
+      }
+      if (hasLogSequenceNumber()) {
+        hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getLogSequenceNumber());
+      }
+      if (hasWriteTime()) {
+        hash = (37 * hash) + WRITETIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getWriteTime());
+      }
+      if (hasClusterId()) {
+        hash = (37 * hash) + CLUSTERID_FIELD_NUMBER;
+        hash = (53 * hash) + getClusterId().hashCode();
+      }
+      if (getScopesCount() > 0) {
+        hash = (37 * hash) + SCOPES_FIELD_NUMBER;
+        hash = (53 * hash) + getScopesList().hashCode();
+      }
+      if (hasFollowingKvCount()) {
+        hash = (37 * hash) + FOLLOWINGKVCOUNT_FIELD_NUMBER;
+        hash = (53 * hash) + getFollowingKvCount();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getClusterIdFieldBuilder();
+          getScopesFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        tableName_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        logSequenceNumber_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        writeTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000008);
+        if (clusterIdBuilder_ == null) {
+          clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+        } else {
+          clusterIdBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000010);
+        if (scopesBuilder_ == null) {
+          scopes_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000020);
+        } else {
+          scopesBuilder_.clear();
+        }
+        followingKvCount_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000040);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey build() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.encodedRegionName_ = encodedRegionName_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.tableName_ = tableName_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.logSequenceNumber_ = logSequenceNumber_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.writeTime_ = writeTime_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        if (clusterIdBuilder_ == null) {
+          result.clusterId_ = clusterId_;
+        } else {
+          result.clusterId_ = clusterIdBuilder_.build();
+        }
+        if (scopesBuilder_ == null) {
+          if (((bitField0_ & 0x00000020) == 0x00000020)) {
+            scopes_ = java.util.Collections.unmodifiableList(scopes_);
+            bitField0_ = (bitField0_ & ~0x00000020);
+          }
+          result.scopes_ = scopes_;
+        } else {
+          result.scopes_ = scopesBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000020;
+        }
+        result.followingKvCount_ = followingKvCount_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
+        if (other.hasEncodedRegionName()) {
+          setEncodedRegionName(other.getEncodedRegionName());
+        }
+        if (other.hasTableName()) {
+          setTableName(other.getTableName());
+        }
+        if (other.hasLogSequenceNumber()) {
+          setLogSequenceNumber(other.getLogSequenceNumber());
+        }
+        if (other.hasWriteTime()) {
+          setWriteTime(other.getWriteTime());
+        }
+        if (other.hasClusterId()) {
+          mergeClusterId(other.getClusterId());
+        }
+        if (scopesBuilder_ == null) {
+          if (!other.scopes_.isEmpty()) {
+            if (scopes_.isEmpty()) {
+              scopes_ = other.scopes_;
+              bitField0_ = (bitField0_ & ~0x00000020);
+            } else {
+              ensureScopesIsMutable();
+              scopes_.addAll(other.scopes_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.scopes_.isEmpty()) {
+            if (scopesBuilder_.isEmpty()) {
+              scopesBuilder_.dispose();
+              scopesBuilder_ = null;
+              scopes_ = other.scopes_;
+              bitField0_ = (bitField0_ & ~0x00000020);
+              scopesBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getScopesFieldBuilder() : null;
+            } else {
+              scopesBuilder_.addAllMessages(other.scopes_);
+            }
+          }
+        }
+        if (other.hasFollowingKvCount()) {
+          setFollowingKvCount(other.getFollowingKvCount());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        if (!hasEncodedRegionName()) {
+          
+          return false;
+        }
+        if (!hasTableName()) {
+          
+          return false;
+        }
+        if (!hasLogSequenceNumber()) {
+          
+          return false;
+        }
+        if (!hasWriteTime()) {
+          
+          return false;
+        }
+        if (hasClusterId()) {
+          if (!getClusterId().isInitialized()) {
+            
+            return false;
+          }
+        }
+        for (int i = 0; i < getScopesCount(); i++) {
+          if (!getScopes(i).isInitialized()) {
+            
+            return false;
+          }
+        }
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              encodedRegionName_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              tableName_ = input.readBytes();
+              break;
+            }
+            case 24: {
+              bitField0_ |= 0x00000004;
+              logSequenceNumber_ = input.readUInt64();
+              break;
+            }
+            case 32: {
+              bitField0_ |= 0x00000008;
+              writeTime_ = input.readUInt64();
+              break;
+            }
+            case 42: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder();
+              if (hasClusterId()) {
+                subBuilder.mergeFrom(getClusterId());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setClusterId(subBuilder.buildPartial());
+              break;
+            }
+            case 50: {
+              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.newBuilder();
+              input.readMessage(subBuilder, extensionRegistry);
+              addScopes(subBuilder.buildPartial());
+              break;
+            }
+            case 56: {
+              bitField0_ |= 0x00000040;
+              followingKvCount_ = input.readUInt32();
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // required bytes encodedRegionName = 1;
+      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasEncodedRegionName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public com.google.protobuf.ByteString getEncodedRegionName() {
+        return encodedRegionName_;
+      }
+      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        encodedRegionName_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearEncodedRegionName() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
+        onChanged();
+        return this;
+      }
+      
+      // required bytes tableName = 2;
+      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasTableName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public com.google.protobuf.ByteString getTableName() {
+        return tableName_;
+      }
+      public Builder setTableName(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        tableName_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearTableName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        tableName_ = getDefaultInstance().getTableName();
+        onChanged();
+        return this;
+      }
+      
+      // required uint64 logSequenceNumber = 3;
+      private long logSequenceNumber_ ;
+      public boolean hasLogSequenceNumber() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      public long getLogSequenceNumber() {
+        return logSequenceNumber_;
+      }
+      public Builder setLogSequenceNumber(long value) {
+        bitField0_ |= 0x00000004;
+        logSequenceNumber_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearLogSequenceNumber() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        logSequenceNumber_ = 0L;
+        onChanged();
+        return this;
+      }
+      
+      // required uint64 writeTime = 4;
+      private long writeTime_ ;
+      public boolean hasWriteTime() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      public long getWriteTime() {
+        return writeTime_;
+      }
+      public Builder setWriteTime(long value) {
+        bitField0_ |= 0x00000008;
+        writeTime_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearWriteTime() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        writeTime_ = 0L;
+        onChanged();
+        return this;
+      }
+      
+      // optional .UUID clusterId = 5;
+      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_;
+      public boolean hasClusterId() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() {
+        if (clusterIdBuilder_ == null) {
+          return clusterId_;
+        } else {
+          return clusterIdBuilder_.getMessage();
+        }
+      }
+      public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
+        if (clusterIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          clusterId_ = value;
+          onChanged();
+        } else {
+          clusterIdBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000010;
+        return this;
+      }
+      public Builder setClusterId(
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
+        if (clusterIdBuilder_ == null) {
+          clusterId_ = builderForValue.build();
+          onChanged();
+        } else {
+          clusterIdBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000010;
+        return this;
+      }
+      public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
+        if (clusterIdBuilder_ == null) {
+          if (((bitField0_ & 0x00000010) == 0x00000010) &&
+              clusterId_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) {
+            clusterId_ =
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial();
+          } else {
+            clusterId_ = value;
+          }
+          onChanged();
+        } else {
+          clusterIdBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000010;
+        return this;
+      }
+      public Builder clearClusterId() {
+        if (clusterIdBuilder_ == null) {
+          clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+          onChanged();
+        } else {
+          clusterIdBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000010);
+        return this;
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() {
+        bitField0_ |= 0x00000010;
+        onChanged();
+        return getClusterIdFieldBuilder().getBuilder();
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
+        if (clusterIdBuilder_ != null) {
+          return clusterIdBuilder_.getMessageOrBuilder();
+        } else {
+          return clusterId_;
+        }
+      }
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
+          getClusterIdFieldBuilder() {
+        if (clusterIdBuilder_ == null) {
+          clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
+                  clusterId_,
+                  getParentForChildren(),
+                  isClean());
+          clusterId_ = null;
+        }
+        return clusterIdBuilder_;
+      }
+      
+      // repeated .FamilyScope scopes = 6;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_ =
+        java.util.Collections.emptyList();
+      private void ensureScopesIsMutable() {
+        if (!((bitField0_ & 0x00000020) == 0x00000020)) {
+          scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>(scopes_);
+          bitField0_ |= 0x00000020;
+         }
+      }
+      
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_;
+      
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
+        if (scopesBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(scopes_);
+        } else {
+          return scopesBuilder_.getMessageList();
+        }
+      }
+      public int getScopesCount() {
+        if (scopesBuilder_ == null) {
+          return scopes_.size();
+        } else {
+          return scopesBuilder_.getCount();
+        }
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
+        if (scopesBuilder_ == null) {
+          return scopes_.get(index);
+        } else {
+          return scopesBuilder_.getMessage(index);
+        }
+      }
+      public Builder setScopes(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
+        if (scopesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureScopesIsMutable();
+          scopes_.set(index, value);
+          onChanged();
+        } else {
+          scopesBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      public Builder setScopes(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
+        if (scopesBuilder_ == null) {
+          ensureScopesIsMutable();
+          scopes_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          scopesBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      public Builder addScopes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
+        if (scopesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureScopesIsMutable();
+          scopes_.add(value);
+          onChanged();
+        } else {
+          scopesBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      public Builder addScopes(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
+        if (scopesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureScopesIsMutable();
+          scopes_.add(index, value);
+          onChanged();
+        } else {
+          scopesBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      public Builder addScopes(
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
+        if (scopesBuilder_ == null) {
+          ensureScopesIsMutable();
+          scopes_.add(builderForValue.build());
+          onChanged();
+        } else {
+          scopesBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      public Builder addScopes(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
+        if (scopesBuilder_ == null) {
+          ensureScopesIsMutable();
+          scopes_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          scopesBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      public Builder addAllScopes(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> values) {
+        if (scopesBuilder_ == null) {
+          ensureScopesIsMutable();
+          super.addAll(values, scopes_);
+          onChanged();
+        } else {
+          scopesBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      public Builder clearScopes() {
+        if (scopesBuilder_ == null) {
+          scopes_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000020);
+          onChanged();
+        } else {
+          scopesBuilder_.clear();
+        }
+        return this;
+      }
+      public Builder removeScopes(int index) {
+        if (scopesBuilder_ == null) {
+          ensureScopesIsMutable();
+          scopes_.remove(index);
+          onChanged();
+        } else {
+          scopesBuilder_.remove(index);
+        }
+        return this;
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder getScopesBuilder(
+          int index) {
+        return getScopesFieldBuilder().getBuilder(index);
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
+          int index) {
+        if (scopesBuilder_ == null) {
+          return scopes_.get(index);
+        } else {
+          return scopesBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
+           getScopesOrBuilderList() {
+        if (scopesBuilder_ != null) {
+          return scopesBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(scopes_);
+        }
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder() {
+        return getScopesFieldBuilder().addBuilder(
+            org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance());
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder(
+          int index) {
+        return getScopesFieldBuilder().addBuilder(
+            index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance());
+      }
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder> 
+           getScopesBuilderList() {
+        return getScopesFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
+          getScopesFieldBuilder() {
+        if (scopesBuilder_ == null) {
+          scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>(
+                  scopes_,
+                  ((bitField0_ & 0x00000020) == 0x00000020),
+                  getParentForChildren(),
+                  isClean());
+          scopes_ = null;
+        }
+        return scopesBuilder_;
+      }
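The repeated `scopes` field (field 6) above exposes two mutation paths: message-based setters (`addScopes`, `setScopes`) that copy a finished `FamilyScope` into the internal list, and the `RepeatedFieldBuilder` path (`addScopesBuilder`, `getScopesBuilder`) that edits nested builders in place; the first call to `getScopesFieldBuilder()` migrates the list into the builder and nulls out `scopes_`. A minimal sketch of both paths, assuming `WALKey.newBuilder()` as generated earlier in this file:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;

    public class ScopesSketch {
      public static void main(String[] args) {
        WALKey.Builder key = WALKey.newBuilder();

        // Message path: build the FamilyScope first, then copy it in.
        key.addScopes(FamilyScope.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))
            .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
            .build());

        // Builder path: addScopesBuilder() switches the field into
        // RepeatedFieldBuilder mode; edits apply without re-adding.
        key.addScopesBuilder()
            .setFamily(ByteString.copyFromUtf8("cf2"))
            .setScopeType(ScopeType.REPLICATION_SCOPE_LOCAL);

        System.out.println(key.getScopesCount()); // 2
      }
    }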
+      
+      // optional uint32 followingKvCount = 7;
+      private int followingKvCount_ ;
+      public boolean hasFollowingKvCount() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      public int getFollowingKvCount() {
+        return followingKvCount_;
+      }
+      public Builder setFollowingKvCount(int value) {
+        bitField0_ |= 0x00000040;
+        followingKvCount_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearFollowingKvCount() {
+        bitField0_ = (bitField0_ & ~0x00000040);
+        followingKvCount_ = 0;
+        onChanged();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:WALKey)
+    }
+    
+    static {
+      defaultInstance = new WALKey(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:WALKey)
+  }
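The `followingKvCount` accessors above show the presence contract every optional field in this file follows: bit 0x40 of `bitField0_` records whether the field was explicitly set, independently of its value, so a set-to-zero is distinguishable from never-set. A small sketch, again assuming `WALKey.newBuilder()` from earlier in the file:

    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;

    public class FollowingKvCountSketch {
      public static void main(String[] args) {
        WALKey.Builder key = WALKey.newBuilder();
        System.out.println(key.hasFollowingKvCount()); // false: bit 0x40 clear
        key.setFollowingKvCount(0);                    // explicitly set, even to the default 0
        System.out.println(key.hasFollowingKvCount()); // true: bit 0x40 now set
        key.clearFollowingKvCount();
        System.out.println(key.hasFollowingKvCount()); // false again, value reset to 0
      }
    }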
+  
+  public interface FamilyScopeOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // required bytes family = 1;
+    boolean hasFamily();
+    com.google.protobuf.ByteString getFamily();
+    
+    // required .ScopeType scopeType = 2;
+    boolean hasScopeType();
+    org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType();
+  }
+  public static final class FamilyScope extends
+      com.google.protobuf.GeneratedMessage
+      implements FamilyScopeOrBuilder {
+    // Use FamilyScope.newBuilder() to construct.
+    private FamilyScope(Builder builder) {
+      super(builder);
+    }
+    private FamilyScope(boolean noInit) {}
+    
+    private static final FamilyScope defaultInstance;
+    public static FamilyScope getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public FamilyScope getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // required bytes family = 1;
+    public static final int FAMILY_FIELD_NUMBER = 1;
+    private com.google.protobuf.ByteString family_;
+    public boolean hasFamily() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public com.google.protobuf.ByteString getFamily() {
+      return family_;
+    }
+    
+    // required .ScopeType scopeType = 2;
+    public static final int SCOPETYPE_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_;
+    public boolean hasScopeType() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() {
+      return scopeType_;
+    }
+    
+    private void initFields() {
+      family_ = com.google.protobuf.ByteString.EMPTY;
+      scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      if (!hasFamily()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasScopeType()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, family_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeEnum(2, scopeType_.getNumber());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, family_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeEnumSize(2, scopeType_.getNumber());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
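`getSerializedSize()` above is plain wire-format arithmetic: each field costs a tag varint plus its payload, and the result is memoized in `memoizedSerializedSize`. A worked check for a two-byte family and `REPLICATION_SCOPE_GLOBAL` (enum number 1):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;

    public class FamilyScopeSizeSketch {
      public static void main(String[] args) {
        FamilyScope fs = FamilyScope.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))          // 2 payload bytes
            .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)  // enum number 1
            .build();
        // field 1 (bytes): 1 tag byte + 1 length byte + 2 payload bytes = 4
        // field 2 (enum):  1 tag byte + 1 varint byte                   = 2
        System.out.println(fs.getSerializedSize()); // 6
      }
    }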
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) obj;
+      
+      boolean result = true;
+      result = result && (hasFamily() == other.hasFamily());
+      if (hasFamily()) {
+        result = result && getFamily()
+            .equals(other.getFamily());
+      }
+      result = result && (hasScopeType() == other.hasScopeType());
+      if (hasScopeType()) {
+        result = result &&
+            (getScopeType() == other.getScopeType());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasFamily()) {
+        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+        hash = (53 * hash) + getFamily().hashCode();
+      }
+      if (hasScopeType()) {
+        hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER;
+        hash = (53 * hash) + hashEnum(getScopeType());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        family_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope build() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.family_ = family_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.scopeType_ = scopeType_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()) return this;
+        if (other.hasFamily()) {
+          setFamily(other.getFamily());
+        }
+        if (other.hasScopeType()) {
+          setScopeType(other.getScopeType());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        if (!hasFamily()) {
+          return false;
+        }
+        if (!hasScopeType()) {
+          return false;
+        }
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              family_ = input.readBytes();
+              break;
+            }
+            case 16: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(2, rawValue);
+              } else {
+                bitField0_ |= 0x00000002;
+                scopeType_ = value;
+              }
+              break;
+            }
+          }
+        }
+      }
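Note the enum branch at `case 16` above: a wire value with no matching `ScopeType` is not a parse error; `ScopeType.valueOf` returns null and the raw varint is diverted into the unknown-field set. Because `scopeType` is required, a message carrying only an unrecognized value would still fail `isInitialized()`, so the sketch below writes a recognized value after the bogus one (last value wins for a non-repeated field). This is a sketch assuming the standard `CodedOutputStream` low-level writers:

    import java.io.ByteArrayOutputStream;

    import com.google.protobuf.ByteString;
    import com.google.protobuf.CodedOutputStream;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;

    public class UnknownEnumSketch {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream raw = new ByteArrayOutputStream();
        CodedOutputStream out = CodedOutputStream.newInstance(raw);
        out.writeBytes(1, ByteString.copyFromUtf8("cf")); // required family
        out.writeEnum(2, 99);                             // no ScopeType has number 99
        out.writeEnum(2, 1);                              // REPLICATION_SCOPE_GLOBAL; satisfies the required field
        out.flush();

        FamilyScope parsed = FamilyScope.parseFrom(raw.toByteArray());
        System.out.println(parsed.getScopeType());                            // REPLICATION_SCOPE_GLOBAL
        System.out.println(parsed.getUnknownFields().asMap().containsKey(2)); // true: the 99 is preserved
      }
    }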
+      
+      private int bitField0_;
+      
+      // required bytes family = 1;
+      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasFamily() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public com.google.protobuf.ByteString getFamily() {
+        return family_;
+      }
+      public Builder setFamily(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        family_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearFamily() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        family_ = getDefaultInstance().getFamily();
+        onChanged();
+        return this;
+      }
+      
+      // required .ScopeType scopeType = 2;
+      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
+      public boolean hasScopeType() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() {
+        return scopeType_;
+      }
+      public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        scopeType_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearScopeType() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
+        onChanged();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:FamilyScope)
+    }
+    
+    static {
+      defaultInstance = new FamilyScope(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:FamilyScope)
+  }
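Putting the `FamilyScope` pieces together: both required fields must be set or `build()` throws, and any of the `parseFrom` overloads above reverses `writeTo`. A minimal round-trip sketch:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;

    public class FamilyScopeRoundTrip {
      public static void main(String[] args) throws Exception {
        FamilyScope fs = FamilyScope.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))
            .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
            .build(); // would throw if either required field were missing

        FamilyScope copy = FamilyScope.parseFrom(fs.toByteString());
        System.out.println(copy.equals(fs)); // true: field-by-field equals() above
      }
    }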
+  
+  public interface CompactionDescriptorOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // required bytes tableName = 1;
+    boolean hasTableName();
+    com.google.protobuf.ByteString getTableName();
+    
+    // required bytes encodedRegionName = 2;
+    boolean hasEncodedRegionName();
+    com.google.protobuf.ByteString getEncodedRegionName();
+    
+    // required bytes familyName = 3;
+    boolean hasFamilyName();
+    com.google.protobuf.ByteString getFamilyName();
+    
+    // repeated string compactionInput = 4;
+    java.util.List<String> getCompactionInputList();
+    int getCompactionInputCount();
+    String getCompactionInput(int index);
+    
+    // repeated string compactionOutput = 5;
+    java.util.List<String> getCompactionOutputList();
+    int getCompactionOutputCount();
+    String getCompactionOutput(int index);
+    
+    // required string storeHomeDir = 6;
+    boolean hasStoreHomeDir();
+    String getStoreHomeDir();
+  }
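The `CompactionDescriptorOrBuilder` contract above maps one-to-one onto the CompactionDescriptor message in WAL.proto, describing a compaction event by its table, region, family, input and output files, and store directory. A sketch of constructing one, assuming the standard generated builder setters (`setTableName`, `addCompactionInput`, `setStoreHomeDir`, and so on), whose bodies fall outside this excerpt but follow the same pattern as `FamilyScope.Builder`:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;

    public class CompactionMarkerSketch {
      public static void main(String[] args) {
        // All values below are hypothetical, and the builder methods are
        // assumed from the generated naming convention (not in this excerpt).
        CompactionDescriptor cd = CompactionDescriptor.newBuilder()
            .setTableName(ByteString.copyFromUtf8("t1"))
            .setEncodedRegionName(ByteString.copyFromUtf8("abc123"))
            .setFamilyName(ByteString.copyFromUtf8("cf"))
            .addCompactionInput("hfile-1")
            .addCompactionInput("hfile-2")
            .addCompactionOutput("hfile-3")
            .setStoreHomeDir("/hbase/t1/abc123/cf")
            .build();
        System.out.println(cd.getCompactionInputCount()); // 2
      }
    }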
+  public static final class CompactionDescriptor extends
+      com.google.protobuf.GeneratedMessage
+      implements CompactionDescriptorOrBuilder {
+    // Use CompactionDescriptor.newBuilder() to construct.
+    private CompactionDescriptor(Builder builder) {
+      super(builder);
+    }
+    private CompactionDescriptor(boolean noInit) {}
+    
+    private static final CompactionDescriptor defaultInstance;
+    public static CompactionDescriptor getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public CompactionDescriptor getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // required bytes tableName = 1;
+    public static final int TABLENAME_FIELD_NUMBER = 1;
+    private com.google.protobuf.ByteString tableName_;
+    public boolean hasTableName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public com.google.protobuf.ByteString getTableName() {
+      return tableName_;
+    }
+    
+    // required bytes encodedRegionName = 2;
+    public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 2;
+    private com.google.protobuf.ByteString encodedRegionName_;
+    public boolean hasEncodedRegionName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public com.google.protobuf.ByteString getEncodedRegionName() {
+      return encodedRegionName_;
+    }
+    
+    // required bytes familyName = 3;
+    public static final int FAMILYNAME_FIELD_NUMBER = 3;
+    private com.google.protobuf.ByteString familyName_;
+    public boolean hasFamilyName() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    public com.google.protobuf.ByteString getFamilyName() {
+      return familyName_;
+    }
+    
+    // repeated string compactionInput = 4;
+    public static final int COMPACTIONINPUT_FIELD_NUMBER = 4;
+    private com.google.protobuf.LazyStringList compactionInput_;
+    public java.util.List<String>
+        getCompactionInputList() {
+      return compactionInput_;
+    }
+    public int getCompactionInputCount() {
+      return compactionInput_.size();
+    }
+    public String getCompactionInput(int index) {
+      return compactionInput_.get(index);
+    }
+    
+    // repeated string compactionOutput = 5;
+    public static final int COMPACTIONOUTPUT_FIELD_NUMBER = 5;
+    private com.google.protobuf.LazyStringList compactionOutput_;
+    public java.util.List<String>
+        getCompactionOutputList() {
+      return compactionOutput_;
+    }
+    public int getCompactionOutputCount() {
+      return compactionOutput_.size();
+    }
+    public String getCompactionOutput(int index) {
+      return compactionOutput_.get(index);
+    }
+    
+    // required string storeHomeDir = 6;
+    public static final int STOREHOMEDIR_FIELD_NUMBER = 6;
+    private java.lang.Object storeHomeDir_;
+    public boolean hasStoreHomeDir() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    public String getStoreHomeDir() {
+      java.lang.Object ref = storeHomeDir_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          storeHomeDir_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getStoreHomeDirBytes() {
+      java.lang.Object ref = storeHomeDir_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        storeHomeDir_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+    
+    private void initFields() {
+      tableName_ = com.google.protobuf.ByteString.EMPTY;
+      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+      familyName_ = com.google.protobuf.ByteString.EMPTY;
+      compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+      compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+      storeHomeDir_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      if (!hasTableName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasEncodedRegionName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasFamilyName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasStoreHomeDir()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, tableName_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, encodedRegionName_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, familyName_);
+      }
+      for (int i = 0; i < compactionInput_.size(); i++) {
+        output.writeBytes(4, compactionInput_.getByteString(i));
+      }
+      for (int i = 0; i < compactionOutput_.size(); i++) {
+        output.writeBytes(5, compactionOutput_.getByteString(i));
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBytes(6, getStoreHomeDirBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, tableName_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, encodedRegionName_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, familyName_);
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < compactionInput_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(compactionInput_.getByteString(i));
+        }
+        size += dataSize;
+        size += 1 * getCompactionInputList().size();
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < compactionOutput_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(compactionOutput_.getByteString(i));
+        }
+        size += dataSize;
+        size += 1 * getCompactionOutputList().size();
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(6, getStoreHomeDirBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
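In the repeated-string blocks above, `size += 1 * getCompactionInputList().size()` charges one byte per entry for the field tag, since the tags (4 << 3) | 2 and (5 << 3) | 2 each fit in a single varint byte; the payload itself is a length varint plus the UTF-8 bytes. A quick check with the `CodedOutputStream` size helpers, using `computeStringSizeNoTag` as the String twin of the `computeBytesSizeNoTag` call above (assumed available in this protobuf version):

    import com.google.protobuf.CodedOutputStream;

    public class RepeatedStringSizeSketch {
      public static void main(String[] args) {
        System.out.println(CodedOutputStream.computeTagSize(4));                 // 1 byte per entry
        System.out.println(CodedOutputStream.computeStringSizeNoTag("hfile-1")); // 8: 1 length byte + 7 UTF-8 bytes
      }
    }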
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) obj;
+      
+      boolean result = true;
+      result = result && (hasTableName() == other.hasTableName());
+      if (hasTableName()) {
+        result = result && getTableName()
+            .equals(other.getTableName());
+      }
+      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
+      if (hasEncodedRegionName()) {
+        result = result && getEncodedRegionName()
+            .equals(other.getEncodedRegionName());
+      }
+      result = result && (hasFamilyName() == other.hasFamilyName());
+      if (hasFamilyName()) {
+        result = result && getFamilyName()
+            .equals(other.getFamilyName());
+      }
+      result = result && getCompactionInputList()
+          .equals(other.getCompactionInputList());
+      result = result && getCompactionOutputList()
+          .equals(other.getCompactionOutputList());
+      result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
+      if (hasStoreHomeDir()) {
+        result = result && getStoreHomeDir()
+            .equals(other.getStoreHomeDir());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {

[... 678 lines stripped ...]

