hbase-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r1466556 [13/41] - in /hbase/trunk: ./ hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-protocol/...
Date: Wed, 10 Apr 2013 16:42:29 GMT
Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java?rev=1466556&r1=1466555&r2=1466556&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java Wed Apr 10 16:42:28 2013
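[Reader's sketch, not part of the commit] The hunks below are protoc regeneration output. For callers, the static parseFrom() helpers keep their signatures but now delegate to a newly exposed public PARSER, per-field javadoc is added, and wire parsing moves from Builder.mergeFrom into a private message constructor. A minimal round-trip sketch using only the API visible in this diff plus standard protobuf helpers (ByteString.copyFromUtf8, toByteArray):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType;

    public class CellRoundTripSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a Cell with the generated Builder, serialize it, and parse it back
        // through both the unchanged static entry point and the new PARSER field.
        Cell cell = Cell.newBuilder()
            .setRow(ByteString.copyFromUtf8("row1"))
            .setFamily(ByteString.copyFromUtf8("cf"))
            .setQualifier(ByteString.copyFromUtf8("q"))
            .setTimestamp(1L)
            .setCellType(CellType.PUT)
            .setValue(ByteString.copyFromUtf8("v"))
            .build();
        byte[] data = cell.toByteArray();
        Cell a = Cell.parseFrom(data);          // unchanged entry point, now backed by PARSER
        Cell b = Cell.PARSER.parseFrom(data);   // the newly exposed Parser, usable directly
        System.out.println(a.equals(b) && a.getCellType() == CellType.PUT);
      }
    }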
@@ -8,26 +8,78 @@ public final class HBaseProtos {
   public static void registerAllExtensions(
       com.google.protobuf.ExtensionRegistry registry) {
   }
+  /**
+   * Protobuf enum {@code CellType}
+   *
+   * <pre>
+   **
+   * The type of the key in a Cell
+   * </pre>
+   */
   public enum CellType
       implements com.google.protobuf.ProtocolMessageEnum {
+    /**
+     * <code>MINIMUM = 0;</code>
+     */
     MINIMUM(0, 0),
+    /**
+     * <code>PUT = 4;</code>
+     */
     PUT(1, 4),
+    /**
+     * <code>DELETE = 8;</code>
+     */
     DELETE(2, 8),
+    /**
+     * <code>DELETE_COLUMN = 12;</code>
+     */
     DELETE_COLUMN(3, 12),
+    /**
+     * <code>DELETE_FAMILY = 14;</code>
+     */
     DELETE_FAMILY(4, 14),
+    /**
+     * <code>MAXIMUM = 255;</code>
+     *
+     * <pre>
+     * MAXIMUM is used when searching; you look from maximum on down.
+     * </pre>
+     */
     MAXIMUM(5, 255),
     ;
-    
+
+    /**
+     * <code>MINIMUM = 0;</code>
+     */
     public static final int MINIMUM_VALUE = 0;
+    /**
+     * <code>PUT = 4;</code>
+     */
     public static final int PUT_VALUE = 4;
+    /**
+     * <code>DELETE = 8;</code>
+     */
     public static final int DELETE_VALUE = 8;
+    /**
+     * <code>DELETE_COLUMN = 12;</code>
+     */
     public static final int DELETE_COLUMN_VALUE = 12;
+    /**
+     * <code>DELETE_FAMILY = 14;</code>
+     */
     public static final int DELETE_FAMILY_VALUE = 14;
+    /**
+     * <code>MAXIMUM = 255;</code>
+     *
+     * <pre>
+     * MAXIMUM is used when searching; you look from maximum on down.
+     * </pre>
+     */
     public static final int MAXIMUM_VALUE = 255;
-    
-    
+
+
     public final int getNumber() { return value; }
-    
+
     public static CellType valueOf(int value) {
       switch (value) {
         case 0: return MINIMUM;
@@ -39,7 +91,7 @@ public final class HBaseProtos {
         default: return null;
       }
     }
-    
+
     public static com.google.protobuf.Internal.EnumLiteMap<CellType>
         internalGetValueMap() {
       return internalValueMap;
@@ -51,7 +103,7 @@ public final class HBaseProtos {
               return CellType.valueOf(number);
             }
           };
-    
+
     public final com.google.protobuf.Descriptors.EnumValueDescriptor
         getValueDescriptor() {
       return getDescriptor().getValues().get(index);
@@ -64,11 +116,9 @@ public final class HBaseProtos {
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0);
     }
-    
-    private static final CellType[] VALUES = {
-      MINIMUM, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, MAXIMUM, 
-    };
-    
+
+    private static final CellType[] VALUES = values();
+
     public static CellType valueOf(
         com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
       if (desc.getType() != getDescriptor()) {
@@ -77,40 +127,89 @@ public final class HBaseProtos {
       }
       return VALUES[desc.getIndex()];
     }
-    
+
     private final int index;
     private final int value;
-    
+
     private CellType(int index, int value) {
       this.index = index;
       this.value = value;
     }
-    
+
     // @@protoc_insertion_point(enum_scope:CellType)
   }
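[Reader's sketch, not part of the commit] The CellType wire numbers above are sparse (0, 4, 8, 12, 14, 255) and independent of the Java ordinal passed as the first constructor argument; the generated valueOf(int) maps wire numbers and returns null for anything unknown. A tiny sketch using only the enum shown here:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType;

    public class CellTypeSketch {
      public static void main(String[] args) {
        System.out.println(CellType.DELETE.getNumber());   // 8: the wire value, not the ordinal (2)
        System.out.println(CellType.valueOf(8));           // DELETE
        System.out.println(CellType.valueOf(7));           // null: not a defined wire value
        System.out.println(CellType.MAXIMUM.getNumber());  // 255, used when searching per the javadoc above
      }
    }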
-  
+
+  /**
+   * Protobuf enum {@code CompareType}
+   *
+   * <pre>
+   * Comparison operators 
+   * </pre>
+   */
   public enum CompareType
       implements com.google.protobuf.ProtocolMessageEnum {
+    /**
+     * <code>LESS = 0;</code>
+     */
     LESS(0, 0),
+    /**
+     * <code>LESS_OR_EQUAL = 1;</code>
+     */
     LESS_OR_EQUAL(1, 1),
+    /**
+     * <code>EQUAL = 2;</code>
+     */
     EQUAL(2, 2),
+    /**
+     * <code>NOT_EQUAL = 3;</code>
+     */
     NOT_EQUAL(3, 3),
+    /**
+     * <code>GREATER_OR_EQUAL = 4;</code>
+     */
     GREATER_OR_EQUAL(4, 4),
+    /**
+     * <code>GREATER = 5;</code>
+     */
     GREATER(5, 5),
+    /**
+     * <code>NO_OP = 6;</code>
+     */
     NO_OP(6, 6),
     ;
-    
+
+    /**
+     * <code>LESS = 0;</code>
+     */
     public static final int LESS_VALUE = 0;
+    /**
+     * <code>LESS_OR_EQUAL = 1;</code>
+     */
     public static final int LESS_OR_EQUAL_VALUE = 1;
+    /**
+     * <code>EQUAL = 2;</code>
+     */
     public static final int EQUAL_VALUE = 2;
+    /**
+     * <code>NOT_EQUAL = 3;</code>
+     */
     public static final int NOT_EQUAL_VALUE = 3;
+    /**
+     * <code>GREATER_OR_EQUAL = 4;</code>
+     */
     public static final int GREATER_OR_EQUAL_VALUE = 4;
+    /**
+     * <code>GREATER = 5;</code>
+     */
     public static final int GREATER_VALUE = 5;
+    /**
+     * <code>NO_OP = 6;</code>
+     */
     public static final int NO_OP_VALUE = 6;
-    
-    
+
+
     public final int getNumber() { return value; }
-    
+
     public static CompareType valueOf(int value) {
       switch (value) {
         case 0: return LESS;
@@ -123,7 +222,7 @@ public final class HBaseProtos {
         default: return null;
       }
     }
-    
+
     public static com.google.protobuf.Internal.EnumLiteMap<CompareType>
         internalGetValueMap() {
       return internalValueMap;
@@ -135,7 +234,7 @@ public final class HBaseProtos {
               return CompareType.valueOf(number);
             }
           };
-    
+
     public final com.google.protobuf.Descriptors.EnumValueDescriptor
         getValueDescriptor() {
       return getDescriptor().getValues().get(index);
@@ -148,11 +247,9 @@ public final class HBaseProtos {
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1);
     }
-    
-    private static final CompareType[] VALUES = {
-      LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, 
-    };
-    
+
+    private static final CompareType[] VALUES = values();
+
     public static CompareType valueOf(
         com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
       if (desc.getType() != getDescriptor()) {
@@ -161,134 +258,309 @@ public final class HBaseProtos {
       }
       return VALUES[desc.getIndex()];
     }
-    
+
     private final int index;
     private final int value;
-    
+
     private CompareType(int index, int value) {
       this.index = index;
       this.value = value;
     }
-    
+
     // @@protoc_insertion_point(enum_scope:CompareType)
   }
-  
+
   public interface CellOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // optional bytes row = 1;
+    /**
+     * <code>optional bytes row = 1;</code>
+     */
     boolean hasRow();
+    /**
+     * <code>optional bytes row = 1;</code>
+     */
     com.google.protobuf.ByteString getRow();
-    
+
     // optional bytes family = 2;
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
     boolean hasFamily();
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
     com.google.protobuf.ByteString getFamily();
-    
+
     // optional bytes qualifier = 3;
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
     boolean hasQualifier();
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
     com.google.protobuf.ByteString getQualifier();
-    
+
     // optional uint64 timestamp = 4;
+    /**
+     * <code>optional uint64 timestamp = 4;</code>
+     */
     boolean hasTimestamp();
+    /**
+     * <code>optional uint64 timestamp = 4;</code>
+     */
     long getTimestamp();
-    
+
     // optional .CellType cellType = 5;
+    /**
+     * <code>optional .CellType cellType = 5;</code>
+     */
     boolean hasCellType();
+    /**
+     * <code>optional .CellType cellType = 5;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getCellType();
-    
+
     // optional bytes value = 6;
+    /**
+     * <code>optional bytes value = 6;</code>
+     */
     boolean hasValue();
+    /**
+     * <code>optional bytes value = 6;</code>
+     */
     com.google.protobuf.ByteString getValue();
   }
+  /**
+   * Protobuf type {@code Cell}
+   *
+   * <pre>
+   **
+   * Protocol buffer version of Cell.
+   * </pre>
+   */
   public static final class Cell extends
       com.google.protobuf.GeneratedMessage
       implements CellOrBuilder {
     // Use Cell.newBuilder() to construct.
-    private Cell(Builder builder) {
+    private Cell(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private Cell(boolean noInit) {}
-    
+    private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final Cell defaultInstance;
     public static Cell getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public Cell getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private Cell(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              row_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              family_ = input.readBytes();
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              qualifier_ = input.readBytes();
+              break;
+            }
+            case 32: {
+              bitField0_ |= 0x00000008;
+              timestamp_ = input.readUInt64();
+              break;
+            }
+            case 40: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(5, rawValue);
+              } else {
+                bitField0_ |= 0x00000010;
+                cellType_ = value;
+              }
+              break;
+            }
+            case 50: {
+              bitField0_ |= 0x00000020;
+              value_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<Cell> PARSER =
+        new com.google.protobuf.AbstractParser<Cell>() {
+      public Cell parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Cell(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Cell> getParserForType() {
+      return PARSER;
     }
-    
+
     private int bitField0_;
     // optional bytes row = 1;
     public static final int ROW_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString row_;
+    /**
+     * <code>optional bytes row = 1;</code>
+     */
     public boolean hasRow() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>optional bytes row = 1;</code>
+     */
     public com.google.protobuf.ByteString getRow() {
       return row_;
     }
-    
+
     // optional bytes family = 2;
     public static final int FAMILY_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString family_;
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
     public boolean hasFamily() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
     public com.google.protobuf.ByteString getFamily() {
       return family_;
     }
-    
+
     // optional bytes qualifier = 3;
     public static final int QUALIFIER_FIELD_NUMBER = 3;
     private com.google.protobuf.ByteString qualifier_;
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
     public boolean hasQualifier() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
     }
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
     public com.google.protobuf.ByteString getQualifier() {
       return qualifier_;
     }
-    
+
     // optional uint64 timestamp = 4;
     public static final int TIMESTAMP_FIELD_NUMBER = 4;
     private long timestamp_;
+    /**
+     * <code>optional uint64 timestamp = 4;</code>
+     */
     public boolean hasTimestamp() {
       return ((bitField0_ & 0x00000008) == 0x00000008);
     }
+    /**
+     * <code>optional uint64 timestamp = 4;</code>
+     */
     public long getTimestamp() {
       return timestamp_;
     }
-    
+
     // optional .CellType cellType = 5;
     public static final int CELLTYPE_FIELD_NUMBER = 5;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType cellType_;
+    /**
+     * <code>optional .CellType cellType = 5;</code>
+     */
     public boolean hasCellType() {
       return ((bitField0_ & 0x00000010) == 0x00000010);
     }
+    /**
+     * <code>optional .CellType cellType = 5;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getCellType() {
       return cellType_;
     }
-    
+
     // optional bytes value = 6;
     public static final int VALUE_FIELD_NUMBER = 6;
     private com.google.protobuf.ByteString value_;
+    /**
+     * <code>optional bytes value = 6;</code>
+     */
     public boolean hasValue() {
       return ((bitField0_ & 0x00000020) == 0x00000020);
     }
+    /**
+     * <code>optional bytes value = 6;</code>
+     */
     public com.google.protobuf.ByteString getValue() {
       return value_;
     }
-    
+
     private void initFields() {
       row_ = com.google.protobuf.ByteString.EMPTY;
       family_ = com.google.protobuf.ByteString.EMPTY;
@@ -301,11 +573,11 @@ public final class HBaseProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -329,12 +601,12 @@ public final class HBaseProtos {
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -364,14 +636,14 @@ public final class HBaseProtos {
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -381,7 +653,7 @@ public final class HBaseProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell) obj;
-      
+
       boolean result = true;
       result = result && (hasRow() == other.hasRow());
       if (hasRow()) {
@@ -417,9 +689,13 @@ public final class HBaseProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasRow()) {
@@ -447,89 +723,84 @@ public final class HBaseProtos {
         hash = (53 * hash) + getValue().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code Cell}
+     *
+     * <pre>
+     **
+     * Protocol buffer version of Cell.
+     * </pre>
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellOrBuilder {
@@ -537,18 +808,21 @@ public final class HBaseProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -559,7 +833,7 @@ public final class HBaseProtos {
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         row_ = com.google.protobuf.ByteString.EMPTY;
@@ -576,20 +850,20 @@ public final class HBaseProtos {
         bitField0_ = (bitField0_ & ~0x00000020);
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell build() {
         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell result = buildPartial();
         if (!result.isInitialized()) {
@@ -597,17 +871,7 @@ public final class HBaseProtos {
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell(this);
         int from_bitField0_ = bitField0_;
@@ -640,7 +904,7 @@ public final class HBaseProtos {
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell)other);
@@ -649,7 +913,7 @@ public final class HBaseProtos {
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDefaultInstance()) return this;
         if (other.hasRow()) {
@@ -673,84 +937,47 @@ public final class HBaseProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              row_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              family_ = input.readBytes();
-              break;
-            }
-            case 26: {
-              bitField0_ |= 0x00000004;
-              qualifier_ = input.readBytes();
-              break;
-            }
-            case 32: {
-              bitField0_ |= 0x00000008;
-              timestamp_ = input.readUInt64();
-              break;
-            }
-            case 40: {
-              int rawValue = input.readEnum();
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(5, rawValue);
-              } else {
-                bitField0_ |= 0x00000010;
-                cellType_ = value;
-              }
-              break;
-            }
-            case 50: {
-              bitField0_ |= 0x00000020;
-              value_ = input.readBytes();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
           }
         }
+        return this;
       }
-      
       private int bitField0_;
-      
+
       // optional bytes row = 1;
       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes row = 1;</code>
+       */
       public boolean hasRow() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>optional bytes row = 1;</code>
+       */
       public com.google.protobuf.ByteString getRow() {
         return row_;
       }
+      /**
+       * <code>optional bytes row = 1;</code>
+       */
       public Builder setRow(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -760,21 +987,33 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bytes row = 1;</code>
+       */
       public Builder clearRow() {
         bitField0_ = (bitField0_ & ~0x00000001);
         row_ = getDefaultInstance().getRow();
         onChanged();
         return this;
       }
-      
+
       // optional bytes family = 2;
       private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes family = 2;</code>
+       */
       public boolean hasFamily() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
+      /**
+       * <code>optional bytes family = 2;</code>
+       */
       public com.google.protobuf.ByteString getFamily() {
         return family_;
       }
+      /**
+       * <code>optional bytes family = 2;</code>
+       */
       public Builder setFamily(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -784,21 +1023,33 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bytes family = 2;</code>
+       */
       public Builder clearFamily() {
         bitField0_ = (bitField0_ & ~0x00000002);
         family_ = getDefaultInstance().getFamily();
         onChanged();
         return this;
       }
-      
+
       // optional bytes qualifier = 3;
       private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes qualifier = 3;</code>
+       */
       public boolean hasQualifier() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
+      /**
+       * <code>optional bytes qualifier = 3;</code>
+       */
       public com.google.protobuf.ByteString getQualifier() {
         return qualifier_;
       }
+      /**
+       * <code>optional bytes qualifier = 3;</code>
+       */
       public Builder setQualifier(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -808,42 +1059,66 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bytes qualifier = 3;</code>
+       */
       public Builder clearQualifier() {
         bitField0_ = (bitField0_ & ~0x00000004);
         qualifier_ = getDefaultInstance().getQualifier();
         onChanged();
         return this;
       }
-      
+
       // optional uint64 timestamp = 4;
       private long timestamp_ ;
+      /**
+       * <code>optional uint64 timestamp = 4;</code>
+       */
       public boolean hasTimestamp() {
         return ((bitField0_ & 0x00000008) == 0x00000008);
       }
+      /**
+       * <code>optional uint64 timestamp = 4;</code>
+       */
       public long getTimestamp() {
         return timestamp_;
       }
+      /**
+       * <code>optional uint64 timestamp = 4;</code>
+       */
       public Builder setTimestamp(long value) {
         bitField0_ |= 0x00000008;
         timestamp_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 timestamp = 4;</code>
+       */
       public Builder clearTimestamp() {
         bitField0_ = (bitField0_ & ~0x00000008);
         timestamp_ = 0L;
         onChanged();
         return this;
       }
-      
+
       // optional .CellType cellType = 5;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType cellType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.MINIMUM;
+      /**
+       * <code>optional .CellType cellType = 5;</code>
+       */
       public boolean hasCellType() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
       }
+      /**
+       * <code>optional .CellType cellType = 5;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getCellType() {
         return cellType_;
       }
+      /**
+       * <code>optional .CellType cellType = 5;</code>
+       */
       public Builder setCellType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value) {
         if (value == null) {
           throw new NullPointerException();
@@ -853,21 +1128,33 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional .CellType cellType = 5;</code>
+       */
       public Builder clearCellType() {
         bitField0_ = (bitField0_ & ~0x00000010);
         cellType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.MINIMUM;
         onChanged();
         return this;
       }
-      
+
       // optional bytes value = 6;
       private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes value = 6;</code>
+       */
       public boolean hasValue() {
         return ((bitField0_ & 0x00000020) == 0x00000020);
       }
+      /**
+       * <code>optional bytes value = 6;</code>
+       */
       public com.google.protobuf.ByteString getValue() {
         return value_;
       }
+      /**
+       * <code>optional bytes value = 6;</code>
+       */
       public Builder setValue(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -877,163 +1164,374 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bytes value = 6;</code>
+       */
       public Builder clearValue() {
         bitField0_ = (bitField0_ & ~0x00000020);
         value_ = getDefaultInstance().getValue();
         onChanged();
         return this;
       }
-      
+
       // @@protoc_insertion_point(builder_scope:Cell)
     }
-    
+
     static {
       defaultInstance = new Cell(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:Cell)
   }
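[Reader's sketch, not part of the commit] The new private Cell(CodedInputStream, ...) constructor above preserves unrecognized data: unknown tags are collected into unknownFields, and an out-of-range cellType varint is routed through mergeVarintField(5, rawValue) instead of being dropped. A small sketch of that behavior, constructing the raw bytes with protobuf's UnknownFieldSet:

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.UnknownFieldSet;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell;

    public class UnknownCellTypeSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Encode field 5 (cellType) as a varint value that CellType.valueOf() does not know.
        UnknownFieldSet raw = UnknownFieldSet.newBuilder()
            .addField(5, UnknownFieldSet.Field.newBuilder().addVarint(99).build())
            .build();
        Cell parsed = Cell.parseFrom(raw.toByteString());
        System.out.println(parsed.hasCellType());                   // false: 99 is not a known CellType
        System.out.println(parsed.getUnknownFields().hasField(5));  // true: the value is kept for reserialization
      }
    }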
-  
+
   public interface TableSchemaOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // optional bytes name = 1;
+    /**
+     * <code>optional bytes name = 1;</code>
+     */
     boolean hasName();
+    /**
+     * <code>optional bytes name = 1;</code>
+     */
     com.google.protobuf.ByteString getName();
-    
+
     // repeated .BytesBytesPair attributes = 2;
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> 
         getAttributesList();
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index);
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     int getAttributesCount();
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> 
         getAttributesOrBuilderList();
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
         int index);
-    
+
     // repeated .ColumnFamilySchema columnFamilies = 3;
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> 
         getColumnFamiliesList();
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index);
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     int getColumnFamiliesCount();
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> 
         getColumnFamiliesOrBuilderList();
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
         int index);
-    
+
     // repeated .NameStringPair configuration = 4;
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> 
         getConfigurationList();
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     int getConfigurationCount();
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> 
         getConfigurationOrBuilderList();
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
         int index);
   }
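[Reader's sketch, not part of the commit] The TableSchemaOrBuilder getters above expose the repeated fields as read-only lists plus indexed and count accessors. A short reading sketch; getFirst()/getSecond() are assumed from the BytesBytesPair message definition, which is outside this excerpt:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

    public class TableSchemaReadSketch {
      // Dumps a TableSchema obtained elsewhere (e.g. parsed from bytes) via the generated getters.
      static void dump(TableSchema schema) {
        if (schema.hasName()) {
          System.out.println("name=" + schema.getName().toStringUtf8());
        }
        for (BytesBytesPair attr : schema.getAttributesList()) {
          System.out.println(attr.getFirst().toStringUtf8() + " -> " + attr.getSecond().toStringUtf8());
        }
        System.out.println(schema.getColumnFamiliesCount() + " column families, "
            + schema.getConfigurationCount() + " configuration entries");
      }
    }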
+  /**
+   * Protobuf type {@code TableSchema}
+   *
+   * <pre>
+   **
+   * Table Schema
+   * Inspired by the rest TableSchema
+   * </pre>
+   */
   public static final class TableSchema extends
       com.google.protobuf.GeneratedMessage
       implements TableSchemaOrBuilder {
     // Use TableSchema.newBuilder() to construct.
-    private TableSchema(Builder builder) {
+    private TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private TableSchema(boolean noInit) {}
-    
+    private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final TableSchema defaultInstance;
     public static TableSchema getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public TableSchema getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private TableSchema(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              name_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+                attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>();
+                mutable_bitField0_ |= 0x00000002;
+              }
+              attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
+              break;
+            }
+            case 26: {
+              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+                columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>();
+                mutable_bitField0_ |= 0x00000004;
+              }
+              columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry));
+              break;
+            }
+            case 34: {
+              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+                configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>();
+                mutable_bitField0_ |= 0x00000008;
+              }
+              configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+          attributes_ = java.util.Collections.unmodifiableList(attributes_);
+        }
+        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+          columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_);
+        }
+        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+          configuration_ = java.util.Collections.unmodifiableList(configuration_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<TableSchema> PARSER =
+        new com.google.protobuf.AbstractParser<TableSchema>() {
+      public TableSchema parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new TableSchema(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<TableSchema> getParserForType() {
+      return PARSER;
     }
-    
+
     private int bitField0_;
     // optional bytes name = 1;
     public static final int NAME_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString name_;
+    /**
+     * <code>optional bytes name = 1;</code>
+     */
     public boolean hasName() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>optional bytes name = 1;</code>
+     */
     public com.google.protobuf.ByteString getName() {
       return name_;
     }
-    
+
     // repeated .BytesBytesPair attributes = 2;
     public static final int ATTRIBUTES_FIELD_NUMBER = 2;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_;
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() {
       return attributes_;
     }
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> 
         getAttributesOrBuilderList() {
       return attributes_;
     }
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     public int getAttributesCount() {
       return attributes_.size();
     }
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) {
       return attributes_.get(index);
     }
+    /**
+     * <code>repeated .BytesBytesPair attributes = 2;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
         int index) {
       return attributes_.get(index);
     }
-    
+
     // repeated .ColumnFamilySchema columnFamilies = 3;
     public static final int COLUMNFAMILIES_FIELD_NUMBER = 3;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_;
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() {
       return columnFamilies_;
     }
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> 
         getColumnFamiliesOrBuilderList() {
       return columnFamilies_;
     }
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     public int getColumnFamiliesCount() {
       return columnFamilies_.size();
     }
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) {
       return columnFamilies_.get(index);
     }
+    /**
+     * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
         int index) {
       return columnFamilies_.get(index);
     }
-    
+
     // repeated .NameStringPair configuration = 4;
     public static final int CONFIGURATION_FIELD_NUMBER = 4;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
       return configuration_;
     }
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> 
         getConfigurationOrBuilderList() {
       return configuration_;
     }
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     public int getConfigurationCount() {
       return configuration_.size();
     }
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
       return configuration_.get(index);
     }
+    /**
+     * <code>repeated .NameStringPair configuration = 4;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
         int index) {
       return configuration_.get(index);
     }
-    
+
     private void initFields() {
       name_ = com.google.protobuf.ByteString.EMPTY;
       attributes_ = java.util.Collections.emptyList();
@@ -1044,7 +1542,7 @@ public final class HBaseProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       for (int i = 0; i < getAttributesCount(); i++) {
         if (!getAttributes(i).isInitialized()) {
           memoizedIsInitialized = 0;
@@ -1066,7 +1564,7 @@ public final class HBaseProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -1084,12 +1582,12 @@ public final class HBaseProtos {
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -1111,14 +1609,14 @@ public final class HBaseProtos {
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -1128,7 +1626,7 @@ public final class HBaseProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj;
-      
+
       boolean result = true;
       result = result && (hasName() == other.hasName());
       if (hasName()) {
@@ -1145,9 +1643,13 @@ public final class HBaseProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasName()) {
@@ -1167,89 +1669,85 @@ public final class HBaseProtos {
         hash = (53 * hash) + getConfigurationList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
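
A minimal caller-side sketch (not generated code) of the parse path above: the static parseFrom overloads now delegate to the generated PARSER instance instead of a throwaway Builder. The byte[] argument is assumed to hold bytes produced elsewhere, e.g. by TableSchema#toByteArray().

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

public class TableSchemaParseExample {
  // Deserialize a TableSchema; parseFrom() throws InvalidProtocolBufferException if
  // the bytes are malformed or a nested message is missing required fields.
  public static TableSchema fromBytes(byte[] serialized)
      throws InvalidProtocolBufferException {
    return TableSchema.parseFrom(serialized);
  }
}
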
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code TableSchema}
+     *
+     * <pre>
+     **
+     * Table Schema
+     * Inspired by the rest TableSchema
+     * </pre>
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder {
@@ -1257,18 +1755,21 @@ public final class HBaseProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -1282,7 +1783,7 @@ public final class HBaseProtos {
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         name_ = com.google.protobuf.ByteString.EMPTY;
@@ -1307,20 +1808,20 @@ public final class HBaseProtos {
         }
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() {
         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial();
         if (!result.isInitialized()) {
@@ -1328,17 +1829,7 @@ public final class HBaseProtos {
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this);
         int from_bitField0_ = bitField0_;
@@ -1378,7 +1869,7 @@ public final class HBaseProtos {
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other);
@@ -1387,7 +1878,7 @@ public final class HBaseProtos {
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this;
         if (other.hasName()) {
@@ -1474,7 +1965,7 @@ public final class HBaseProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         for (int i = 0; i < getAttributesCount(); i++) {
           if (!getAttributes(i).isInitialized()) {
@@ -1496,67 +1987,43 @@ public final class HBaseProtos {
         }
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              name_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addAttributes(subBuilder.buildPartial());
-              break;
-            }
-            case 26: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addColumnFamilies(subBuilder.buildPartial());
-              break;
-            }
-            case 34: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addConfiguration(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
           }
         }
+        return this;
       }
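
A caller-side sketch (not generated code) of the error contract of the rewritten mergeFrom() above: on a malformed stream it merges whatever was successfully read, then rethrows InvalidProtocolBufferException carrying the partial message. The handling below is hypothetical.

import java.io.IOException;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

public class TableSchemaMergeExample {
  // Merge from a stream, tolerating a truncated or corrupt tail: the builder keeps
  // the fields merged before the failure, and the exception exposes the partial
  // message via getUnfinishedMessage() for diagnostics.
  public static TableSchema.Builder mergeLeniently(TableSchema.Builder builder,
      CodedInputStream input) throws IOException {
    try {
      builder.mergeFrom(input, ExtensionRegistryLite.getEmptyRegistry());
    } catch (InvalidProtocolBufferException e) {
      System.err.println("Partial TableSchema after parse failure: "
          + e.getUnfinishedMessage());
    }
    return builder;
  }
}
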
-      
       private int bitField0_;
-      
+
       // optional bytes name = 1;
       private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes name = 1;</code>
+       */
       public boolean hasName() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>optional bytes name = 1;</code>
+       */
       public com.google.protobuf.ByteString getName() {
         return name_;
       }
+      /**
+       * <code>optional bytes name = 1;</code>
+       */
       public Builder setName(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -1566,13 +2033,16 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bytes name = 1;</code>
+       */
       public Builder clearName() {
         bitField0_ = (bitField0_ & ~0x00000001);
         name_ = getDefaultInstance().getName();
         onChanged();
         return this;
       }
-      
+
       // repeated .BytesBytesPair attributes = 2;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ =
         java.util.Collections.emptyList();
@@ -1582,10 +2052,13 @@ public final class HBaseProtos {
           bitField0_ |= 0x00000002;
          }
       }
-      
+
       private com.google.protobuf.RepeatedFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_;
-      
+
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() {
         if (attributesBuilder_ == null) {
           return java.util.Collections.unmodifiableList(attributes_);
@@ -1593,6 +2066,9 @@ public final class HBaseProtos {
           return attributesBuilder_.getMessageList();
         }
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public int getAttributesCount() {
         if (attributesBuilder_ == null) {
           return attributes_.size();
@@ -1600,6 +2076,9 @@ public final class HBaseProtos {
           return attributesBuilder_.getCount();
         }
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) {
         if (attributesBuilder_ == null) {
           return attributes_.get(index);
@@ -1607,6 +2086,9 @@ public final class HBaseProtos {
           return attributesBuilder_.getMessage(index);
         }
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder setAttributes(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
         if (attributesBuilder_ == null) {
@@ -1621,6 +2103,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder setAttributes(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
         if (attributesBuilder_ == null) {
@@ -1632,6 +2117,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
         if (attributesBuilder_ == null) {
           if (value == null) {
@@ -1645,6 +2133,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder addAttributes(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
         if (attributesBuilder_ == null) {
@@ -1659,6 +2150,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder addAttributes(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
         if (attributesBuilder_ == null) {
@@ -1670,6 +2164,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder addAttributes(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
         if (attributesBuilder_ == null) {
@@ -1681,6 +2178,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder addAllAttributes(
           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
         if (attributesBuilder_ == null) {
@@ -1692,6 +2192,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder clearAttributes() {
         if (attributesBuilder_ == null) {
           attributes_ = java.util.Collections.emptyList();
@@ -1702,6 +2205,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public Builder removeAttributes(int index) {
         if (attributesBuilder_ == null) {
           ensureAttributesIsMutable();
@@ -1712,10 +2218,16 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder(
           int index) {
         return getAttributesFieldBuilder().getBuilder(index);
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
           int index) {
         if (attributesBuilder_ == null) {
@@ -1723,6 +2235,9 @@ public final class HBaseProtos {
           return attributesBuilder_.getMessageOrBuilder(index);
         }
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> 
            getAttributesOrBuilderList() {
         if (attributesBuilder_ != null) {
@@ -1731,15 +2246,24 @@ public final class HBaseProtos {
           return java.util.Collections.unmodifiableList(attributes_);
         }
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() {
         return getAttributesFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder(
           int index) {
         return getAttributesFieldBuilder().addBuilder(
             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .BytesBytesPair attributes = 2;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> 
            getAttributesBuilderList() {
         return getAttributesFieldBuilder().getBuilderList();
@@ -1758,7 +2282,7 @@ public final class HBaseProtos {
         }
         return attributesBuilder_;
       }
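
A sketch (not generated code) of the nested-builder accessors in the block above: addAttributesBuilder() hands back a BytesBytesPair.Builder that is edited in place and picked up when the parent builder is built. The attribute key and value are placeholders.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

public class TableSchemaAttributeExample {
  // Copy an existing schema and append one attribute without building the
  // BytesBytesPair separately; placeholder key/value for illustration only.
  public static TableSchema withExtraAttribute(TableSchema schema) {
    TableSchema.Builder builder = schema.toBuilder();
    builder.addAttributesBuilder()
        .setFirst(ByteString.copyFromUtf8("EXAMPLE_KEY"))
        .setSecond(ByteString.copyFromUtf8("example_value"));
    return builder.build();
  }
}
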
-      
+
       // repeated .ColumnFamilySchema columnFamilies = 3;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_ =
         java.util.Collections.emptyList();
@@ -1768,10 +2292,13 @@ public final class HBaseProtos {
           bitField0_ |= 0x00000004;
          }
       }
-      
+
       private com.google.protobuf.RepeatedFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_;
-      
+
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() {
         if (columnFamiliesBuilder_ == null) {
           return java.util.Collections.unmodifiableList(columnFamilies_);
@@ -1779,6 +2306,9 @@ public final class HBaseProtos {
           return columnFamiliesBuilder_.getMessageList();
         }
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public int getColumnFamiliesCount() {
         if (columnFamiliesBuilder_ == null) {
           return columnFamilies_.size();
@@ -1786,6 +2316,9 @@ public final class HBaseProtos {
           return columnFamiliesBuilder_.getCount();
         }
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) {
         if (columnFamiliesBuilder_ == null) {
           return columnFamilies_.get(index);
@@ -1793,6 +2326,9 @@ public final class HBaseProtos {
           return columnFamiliesBuilder_.getMessage(index);
         }
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder setColumnFamilies(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
         if (columnFamiliesBuilder_ == null) {
@@ -1807,6 +2343,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder setColumnFamilies(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
         if (columnFamiliesBuilder_ == null) {
@@ -1818,6 +2357,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
         if (columnFamiliesBuilder_ == null) {
           if (value == null) {
@@ -1831,6 +2373,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder addColumnFamilies(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
         if (columnFamiliesBuilder_ == null) {
@@ -1845,6 +2390,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder addColumnFamilies(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
         if (columnFamiliesBuilder_ == null) {
@@ -1856,6 +2404,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder addColumnFamilies(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
         if (columnFamiliesBuilder_ == null) {
@@ -1867,6 +2418,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder addAllColumnFamilies(
           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> values) {
         if (columnFamiliesBuilder_ == null) {
@@ -1878,6 +2432,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder clearColumnFamilies() {
         if (columnFamiliesBuilder_ == null) {
           columnFamilies_ = java.util.Collections.emptyList();
@@ -1888,6 +2445,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public Builder removeColumnFamilies(int index) {
         if (columnFamiliesBuilder_ == null) {
           ensureColumnFamiliesIsMutable();
@@ -1898,10 +2458,16 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder(
           int index) {
         return getColumnFamiliesFieldBuilder().getBuilder(index);
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
           int index) {
         if (columnFamiliesBuilder_ == null) {
@@ -1909,6 +2475,9 @@ public final class HBaseProtos {
           return columnFamiliesBuilder_.getMessageOrBuilder(index);
         }
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> 
            getColumnFamiliesOrBuilderList() {
         if (columnFamiliesBuilder_ != null) {
@@ -1917,15 +2486,24 @@ public final class HBaseProtos {
           return java.util.Collections.unmodifiableList(columnFamilies_);
         }
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() {
         return getColumnFamiliesFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance());
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder(
           int index) {
         return getColumnFamiliesFieldBuilder().addBuilder(
             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance());
       }
+      /**
+       * <code>repeated .ColumnFamilySchema columnFamilies = 3;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder> 
            getColumnFamiliesBuilderList() {
         return getColumnFamiliesFieldBuilder().getBuilderList();
@@ -1944,7 +2522,7 @@ public final class HBaseProtos {
         }
         return columnFamiliesBuilder_;
       }
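
A sketch (not generated code) of assembling a TableSchema through the builder surface shown above; the configuration field that follows (repeated NameStringPair) is populated the same way via addConfiguration(). Table, family, and attribute names are placeholders.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

public class TableSchemaBuildExample {
  // Build a schema with one attribute and one column family; the add* methods accept
  // either a finished message or a sub-builder, as in the generated methods above.
  public static TableSchema build() {
    return TableSchema.newBuilder()
        .setName(ByteString.copyFromUtf8("example_table"))
        .addAttributes(BytesBytesPair.newBuilder()
            .setFirst(ByteString.copyFromUtf8("OWNER"))
            .setSecond(ByteString.copyFromUtf8("example_owner")))
        .addColumnFamilies(ColumnFamilySchema.newBuilder()
            .setName(ByteString.copyFromUtf8("cf")))
        .build();
  }
}
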
-      
+
       // repeated .NameStringPair configuration = 4;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ =
         java.util.Collections.emptyList();
@@ -1954,10 +2532,13 @@ public final class HBaseProtos {
           bitField0_ |= 0x00000008;
          }
       }
-      
+
       private com.google.protobuf.RepeatedFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_;
-      
+
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
         if (configurationBuilder_ == null) {
           return java.util.Collections.unmodifiableList(configuration_);
@@ -1965,6 +2546,9 @@ public final class HBaseProtos {
           return configurationBuilder_.getMessageList();
         }
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public int getConfigurationCount() {
         if (configurationBuilder_ == null) {
           return configuration_.size();
@@ -1972,6 +2556,9 @@ public final class HBaseProtos {
           return configurationBuilder_.getCount();
         }
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
         if (configurationBuilder_ == null) {
           return configuration_.get(index);
@@ -1979,6 +2566,9 @@ public final class HBaseProtos {
           return configurationBuilder_.getMessage(index);
         }
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder setConfiguration(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
         if (configurationBuilder_ == null) {
@@ -1993,6 +2583,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder setConfiguration(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
         if (configurationBuilder_ == null) {
@@ -2004,6 +2597,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
         if (configurationBuilder_ == null) {
           if (value == null) {
@@ -2017,6 +2613,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder addConfiguration(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
         if (configurationBuilder_ == null) {
@@ -2031,6 +2630,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder addConfiguration(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
         if (configurationBuilder_ == null) {
@@ -2042,6 +2644,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder addConfiguration(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
         if (configurationBuilder_ == null) {
@@ -2053,6 +2658,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder addAllConfiguration(
           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
         if (configurationBuilder_ == null) {
@@ -2064,6 +2672,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder clearConfiguration() {
         if (configurationBuilder_ == null) {
           configuration_ = java.util.Collections.emptyList();
@@ -2074,6 +2685,9 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public Builder removeConfiguration(int index) {
         if (configurationBuilder_ == null) {
           ensureConfigurationIsMutable();
@@ -2084,10 +2698,16 @@ public final class HBaseProtos {
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder(
           int index) {
         return getConfigurationFieldBuilder().getBuilder(index);
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
           int index) {
         if (configurationBuilder_ == null) {
@@ -2095,6 +2715,9 @@ public final class HBaseProtos {
           return configurationBuilder_.getMessageOrBuilder(index);
         }
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> 
            getConfigurationOrBuilderList() {
         if (configurationBuilder_ != null) {
@@ -2103,15 +2726,24 @@ public final class HBaseProtos {
           return java.util.Collections.unmodifiableList(configuration_);
         }
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() {
         return getConfigurationFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .NameStringPair configuration = 4;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder(
           int index) {
         return getConfigurationFieldBuilder().addBuilder(
             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
       }
+      /**

[... 16247 lines stripped ...]

