hbase-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r1466759 [31/41] - in /hbase/trunk: ./ hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-protocol/...
Date: Thu, 11 Apr 2013 03:51:48 GMT
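
For context, the diff below regenerates the REST gateway's protobuf bindings for StorageClusterStatus. The following is a minimal client-side sketch, not part of this commit: it assumes the serialized payload (body) comes from an HBase REST cluster-status response requested as protobuf, and the ClusterStatusExample class name is illustrative. It only uses accessors that appear in the generated message shown in the diff (parseFrom, getRegions, getRequests, getAverageLoad, getLiveNodesList, getDeadNodesCount, getDeadNodes, and Node.getName/getHeapSizeMB/getRequests).

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;

    public class ClusterStatusExample {
      // Parse a serialized StorageClusterStatus and print the summary fields
      // plus per-node information, using only accessors present in the
      // generated code touched by this commit.
      public static void printStatus(byte[] body) throws InvalidProtocolBufferException {
        StorageClusterStatus status = StorageClusterStatus.parseFrom(body);
        System.out.println("regions=" + status.getRegions()
            + " requests=" + status.getRequests()
            + " averageLoad=" + status.getAverageLoad());
        for (StorageClusterStatus.Node node : status.getLiveNodesList()) {
          System.out.println("live: " + node.getName()
              + " heapSizeMB=" + node.getHeapSizeMB()
              + " requests=" + node.getRequests());
        }
        for (int i = 0; i < status.getDeadNodesCount(); i++) {
          System.out.println("dead: " + status.getDeadNodes(i));
        }
      }
    }
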
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java?rev=1466759&r1=1466758&r2=1466759&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java Thu Apr 11 03:51:47 2013
@@ -10,734 +10,276 @@ public final class StorageClusterStatusM
   }
   public interface StorageClusterStatusOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-
+    
     // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
-    /**
-     * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;</code>
-     *
-     * <pre>
-     * node status
-     * </pre>
-     */
     java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> 
         getLiveNodesList();
-    /**
-     * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;</code>
-     *
-     * <pre>
-     * node status
-     * </pre>
-     */
     org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index);
-    /**
-     * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;</code>
-     *
-     * <pre>
-     * node status
-     * </pre>
-     */
     int getLiveNodesCount();
-    /**
-     * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;</code>
-     *
-     * <pre>
-     * node status
-     * </pre>
-     */
     java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder> 
         getLiveNodesOrBuilderList();
-    /**
-     * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;</code>
-     *
-     * <pre>
-     * node status
-     * </pre>
-     */
     org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder getLiveNodesOrBuilder(
         int index);
-
+    
     // repeated string deadNodes = 2;
-    /**
-     * <code>repeated string deadNodes = 2;</code>
-     */
-    java.util.List<java.lang.String>
-    getDeadNodesList();
-    /**
-     * <code>repeated string deadNodes = 2;</code>
-     */
+    java.util.List<String> getDeadNodesList();
     int getDeadNodesCount();
-    /**
-     * <code>repeated string deadNodes = 2;</code>
-     */
-    java.lang.String getDeadNodes(int index);
-    /**
-     * <code>repeated string deadNodes = 2;</code>
-     */
-    com.google.protobuf.ByteString
-        getDeadNodesBytes(int index);
-
+    String getDeadNodes(int index);
+    
     // optional int32 regions = 3;
-    /**
-     * <code>optional int32 regions = 3;</code>
-     *
-     * <pre>
-     * summary statistics
-     * </pre>
-     */
     boolean hasRegions();
-    /**
-     * <code>optional int32 regions = 3;</code>
-     *
-     * <pre>
-     * summary statistics
-     * </pre>
-     */
     int getRegions();
-
+    
     // optional int32 requests = 4;
-    /**
-     * <code>optional int32 requests = 4;</code>
-     */
     boolean hasRequests();
-    /**
-     * <code>optional int32 requests = 4;</code>
-     */
     int getRequests();
-
+    
     // optional double averageLoad = 5;
-    /**
-     * <code>optional double averageLoad = 5;</code>
-     */
     boolean hasAverageLoad();
-    /**
-     * <code>optional double averageLoad = 5;</code>
-     */
     double getAverageLoad();
   }
-  /**
-   * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus}
-   */
   public static final class StorageClusterStatus extends
       com.google.protobuf.GeneratedMessage
       implements StorageClusterStatusOrBuilder {
     // Use StorageClusterStatus.newBuilder() to construct.
-    private StorageClusterStatus(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private StorageClusterStatus(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private StorageClusterStatus(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+    private StorageClusterStatus(boolean noInit) {}
+    
     private static final StorageClusterStatus defaultInstance;
     public static StorageClusterStatus getDefaultInstance() {
       return defaultInstance;
     }
-
+    
     public StorageClusterStatus getDefaultInstanceForType() {
       return defaultInstance;
     }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private StorageClusterStatus(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
-                mutable_bitField0_ |= 0x00000001;
-              }
-              liveNodes_.add(input.readMessage(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.PARSER, extensionRegistry));
-              break;
-            }
-            case 18: {
-              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
-                deadNodes_ = new com.google.protobuf.LazyStringArrayList();
-                mutable_bitField0_ |= 0x00000002;
-              }
-              deadNodes_.add(input.readBytes());
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000001;
-              regions_ = input.readInt32();
-              break;
-            }
-            case 32: {
-              bitField0_ |= 0x00000002;
-              requests_ = input.readInt32();
-              break;
-            }
-            case 41: {
-              bitField0_ |= 0x00000004;
-              averageLoad_ = input.readDouble();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-          liveNodes_ = java.util.Collections.unmodifiableList(liveNodes_);
-        }
-        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
-          deadNodes_ = new com.google.protobuf.UnmodifiableLazyStringList(deadNodes_);
-        }
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
+    
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor;
     }
-
+    
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<StorageClusterStatus> PARSER =
-        new com.google.protobuf.AbstractParser<StorageClusterStatus>() {
-      public StorageClusterStatus parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new StorageClusterStatus(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<StorageClusterStatus> getParserForType() {
-      return PARSER;
+      return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable;
     }
-
+    
     public interface RegionOrBuilder
         extends com.google.protobuf.MessageOrBuilder {
-
+      
       // required bytes name = 1;
-      /**
-       * <code>required bytes name = 1;</code>
-       */
       boolean hasName();
-      /**
-       * <code>required bytes name = 1;</code>
-       */
       com.google.protobuf.ByteString getName();
-
+      
       // optional int32 stores = 2;
-      /**
-       * <code>optional int32 stores = 2;</code>
-       */
       boolean hasStores();
-      /**
-       * <code>optional int32 stores = 2;</code>
-       */
       int getStores();
-
+      
       // optional int32 storefiles = 3;
-      /**
-       * <code>optional int32 storefiles = 3;</code>
-       */
       boolean hasStorefiles();
-      /**
-       * <code>optional int32 storefiles = 3;</code>
-       */
       int getStorefiles();
-
+      
       // optional int32 storefileSizeMB = 4;
-      /**
-       * <code>optional int32 storefileSizeMB = 4;</code>
-       */
       boolean hasStorefileSizeMB();
-      /**
-       * <code>optional int32 storefileSizeMB = 4;</code>
-       */
       int getStorefileSizeMB();
-
+      
       // optional int32 memstoreSizeMB = 5;
-      /**
-       * <code>optional int32 memstoreSizeMB = 5;</code>
-       */
       boolean hasMemstoreSizeMB();
-      /**
-       * <code>optional int32 memstoreSizeMB = 5;</code>
-       */
       int getMemstoreSizeMB();
-
+      
       // optional int32 storefileIndexSizeMB = 6;
-      /**
-       * <code>optional int32 storefileIndexSizeMB = 6;</code>
-       */
       boolean hasStorefileIndexSizeMB();
-      /**
-       * <code>optional int32 storefileIndexSizeMB = 6;</code>
-       */
       int getStorefileIndexSizeMB();
-
+      
       // optional int64 readRequestsCount = 7;
-      /**
-       * <code>optional int64 readRequestsCount = 7;</code>
-       */
       boolean hasReadRequestsCount();
-      /**
-       * <code>optional int64 readRequestsCount = 7;</code>
-       */
       long getReadRequestsCount();
-
+      
       // optional int64 writeRequestsCount = 8;
-      /**
-       * <code>optional int64 writeRequestsCount = 8;</code>
-       */
       boolean hasWriteRequestsCount();
-      /**
-       * <code>optional int64 writeRequestsCount = 8;</code>
-       */
       long getWriteRequestsCount();
-
+      
       // optional int32 rootIndexSizeKB = 9;
-      /**
-       * <code>optional int32 rootIndexSizeKB = 9;</code>
-       */
       boolean hasRootIndexSizeKB();
-      /**
-       * <code>optional int32 rootIndexSizeKB = 9;</code>
-       */
       int getRootIndexSizeKB();
-
+      
       // optional int32 totalStaticIndexSizeKB = 10;
-      /**
-       * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-       */
       boolean hasTotalStaticIndexSizeKB();
-      /**
-       * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-       */
       int getTotalStaticIndexSizeKB();
-
+      
       // optional int32 totalStaticBloomSizeKB = 11;
-      /**
-       * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-       */
       boolean hasTotalStaticBloomSizeKB();
-      /**
-       * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-       */
       int getTotalStaticBloomSizeKB();
-
+      
       // optional int64 totalCompactingKVs = 12;
-      /**
-       * <code>optional int64 totalCompactingKVs = 12;</code>
-       */
       boolean hasTotalCompactingKVs();
-      /**
-       * <code>optional int64 totalCompactingKVs = 12;</code>
-       */
       long getTotalCompactingKVs();
-
+      
       // optional int64 currentCompactedKVs = 13;
-      /**
-       * <code>optional int64 currentCompactedKVs = 13;</code>
-       */
       boolean hasCurrentCompactedKVs();
-      /**
-       * <code>optional int64 currentCompactedKVs = 13;</code>
-       */
       long getCurrentCompactedKVs();
     }
-    /**
-     * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region}
-     */
     public static final class Region extends
         com.google.protobuf.GeneratedMessage
         implements RegionOrBuilder {
       // Use Region.newBuilder() to construct.
-      private Region(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      private Region(Builder builder) {
         super(builder);
-        this.unknownFields = builder.getUnknownFields();
       }
-      private Region(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+      private Region(boolean noInit) {}
+      
       private static final Region defaultInstance;
       public static Region getDefaultInstance() {
         return defaultInstance;
       }
-
+      
       public Region getDefaultInstanceForType() {
         return defaultInstance;
       }
-
-      private final com.google.protobuf.UnknownFieldSet unknownFields;
-      @java.lang.Override
-      public final com.google.protobuf.UnknownFieldSet
-          getUnknownFields() {
-        return this.unknownFields;
-      }
-      private Region(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        initFields();
-        int mutable_bitField0_ = 0;
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder();
-        try {
-          boolean done = false;
-          while (!done) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                done = true;
-                break;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  done = true;
-                }
-                break;
-              }
-              case 10: {
-                bitField0_ |= 0x00000001;
-                name_ = input.readBytes();
-                break;
-              }
-              case 16: {
-                bitField0_ |= 0x00000002;
-                stores_ = input.readInt32();
-                break;
-              }
-              case 24: {
-                bitField0_ |= 0x00000004;
-                storefiles_ = input.readInt32();
-                break;
-              }
-              case 32: {
-                bitField0_ |= 0x00000008;
-                storefileSizeMB_ = input.readInt32();
-                break;
-              }
-              case 40: {
-                bitField0_ |= 0x00000010;
-                memstoreSizeMB_ = input.readInt32();
-                break;
-              }
-              case 48: {
-                bitField0_ |= 0x00000020;
-                storefileIndexSizeMB_ = input.readInt32();
-                break;
-              }
-              case 56: {
-                bitField0_ |= 0x00000040;
-                readRequestsCount_ = input.readInt64();
-                break;
-              }
-              case 64: {
-                bitField0_ |= 0x00000080;
-                writeRequestsCount_ = input.readInt64();
-                break;
-              }
-              case 72: {
-                bitField0_ |= 0x00000100;
-                rootIndexSizeKB_ = input.readInt32();
-                break;
-              }
-              case 80: {
-                bitField0_ |= 0x00000200;
-                totalStaticIndexSizeKB_ = input.readInt32();
-                break;
-              }
-              case 88: {
-                bitField0_ |= 0x00000400;
-                totalStaticBloomSizeKB_ = input.readInt32();
-                break;
-              }
-              case 96: {
-                bitField0_ |= 0x00000800;
-                totalCompactingKVs_ = input.readInt64();
-                break;
-              }
-              case 104: {
-                bitField0_ |= 0x00001000;
-                currentCompactedKVs_ = input.readInt64();
-                break;
-              }
-            }
-          }
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          throw e.setUnfinishedMessage(this);
-        } catch (java.io.IOException e) {
-          throw new com.google.protobuf.InvalidProtocolBufferException(
-              e.getMessage()).setUnfinishedMessage(this);
-        } finally {
-          this.unknownFields = unknownFields.build();
-          makeExtensionsImmutable();
-        }
-      }
+      
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
       }
-
+      
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder.class);
-      }
-
-      public static com.google.protobuf.Parser<Region> PARSER =
-          new com.google.protobuf.AbstractParser<Region>() {
-        public Region parsePartialFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          return new Region(input, extensionRegistry);
-        }
-      };
-
-      @java.lang.Override
-      public com.google.protobuf.Parser<Region> getParserForType() {
-        return PARSER;
+        return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
       }
-
+      
       private int bitField0_;
       // required bytes name = 1;
       public static final int NAME_FIELD_NUMBER = 1;
       private com.google.protobuf.ByteString name_;
-      /**
-       * <code>required bytes name = 1;</code>
-       */
       public boolean hasName() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      /**
-       * <code>required bytes name = 1;</code>
-       */
       public com.google.protobuf.ByteString getName() {
         return name_;
       }
-
+      
       // optional int32 stores = 2;
       public static final int STORES_FIELD_NUMBER = 2;
       private int stores_;
-      /**
-       * <code>optional int32 stores = 2;</code>
-       */
       public boolean hasStores() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
-      /**
-       * <code>optional int32 stores = 2;</code>
-       */
       public int getStores() {
         return stores_;
       }
-
+      
       // optional int32 storefiles = 3;
       public static final int STOREFILES_FIELD_NUMBER = 3;
       private int storefiles_;
-      /**
-       * <code>optional int32 storefiles = 3;</code>
-       */
       public boolean hasStorefiles() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
-      /**
-       * <code>optional int32 storefiles = 3;</code>
-       */
       public int getStorefiles() {
         return storefiles_;
       }
-
+      
       // optional int32 storefileSizeMB = 4;
       public static final int STOREFILESIZEMB_FIELD_NUMBER = 4;
       private int storefileSizeMB_;
-      /**
-       * <code>optional int32 storefileSizeMB = 4;</code>
-       */
       public boolean hasStorefileSizeMB() {
         return ((bitField0_ & 0x00000008) == 0x00000008);
       }
-      /**
-       * <code>optional int32 storefileSizeMB = 4;</code>
-       */
       public int getStorefileSizeMB() {
         return storefileSizeMB_;
       }
-
+      
       // optional int32 memstoreSizeMB = 5;
       public static final int MEMSTORESIZEMB_FIELD_NUMBER = 5;
       private int memstoreSizeMB_;
-      /**
-       * <code>optional int32 memstoreSizeMB = 5;</code>
-       */
       public boolean hasMemstoreSizeMB() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
       }
-      /**
-       * <code>optional int32 memstoreSizeMB = 5;</code>
-       */
       public int getMemstoreSizeMB() {
         return memstoreSizeMB_;
       }
-
+      
       // optional int32 storefileIndexSizeMB = 6;
       public static final int STOREFILEINDEXSIZEMB_FIELD_NUMBER = 6;
       private int storefileIndexSizeMB_;
-      /**
-       * <code>optional int32 storefileIndexSizeMB = 6;</code>
-       */
       public boolean hasStorefileIndexSizeMB() {
         return ((bitField0_ & 0x00000020) == 0x00000020);
       }
-      /**
-       * <code>optional int32 storefileIndexSizeMB = 6;</code>
-       */
       public int getStorefileIndexSizeMB() {
         return storefileIndexSizeMB_;
       }
-
+      
       // optional int64 readRequestsCount = 7;
       public static final int READREQUESTSCOUNT_FIELD_NUMBER = 7;
       private long readRequestsCount_;
-      /**
-       * <code>optional int64 readRequestsCount = 7;</code>
-       */
       public boolean hasReadRequestsCount() {
         return ((bitField0_ & 0x00000040) == 0x00000040);
       }
-      /**
-       * <code>optional int64 readRequestsCount = 7;</code>
-       */
       public long getReadRequestsCount() {
         return readRequestsCount_;
       }
-
+      
       // optional int64 writeRequestsCount = 8;
       public static final int WRITEREQUESTSCOUNT_FIELD_NUMBER = 8;
       private long writeRequestsCount_;
-      /**
-       * <code>optional int64 writeRequestsCount = 8;</code>
-       */
       public boolean hasWriteRequestsCount() {
         return ((bitField0_ & 0x00000080) == 0x00000080);
       }
-      /**
-       * <code>optional int64 writeRequestsCount = 8;</code>
-       */
       public long getWriteRequestsCount() {
         return writeRequestsCount_;
       }
-
+      
       // optional int32 rootIndexSizeKB = 9;
       public static final int ROOTINDEXSIZEKB_FIELD_NUMBER = 9;
       private int rootIndexSizeKB_;
-      /**
-       * <code>optional int32 rootIndexSizeKB = 9;</code>
-       */
       public boolean hasRootIndexSizeKB() {
         return ((bitField0_ & 0x00000100) == 0x00000100);
       }
-      /**
-       * <code>optional int32 rootIndexSizeKB = 9;</code>
-       */
       public int getRootIndexSizeKB() {
         return rootIndexSizeKB_;
       }
-
+      
       // optional int32 totalStaticIndexSizeKB = 10;
       public static final int TOTALSTATICINDEXSIZEKB_FIELD_NUMBER = 10;
       private int totalStaticIndexSizeKB_;
-      /**
-       * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-       */
       public boolean hasTotalStaticIndexSizeKB() {
         return ((bitField0_ & 0x00000200) == 0x00000200);
       }
-      /**
-       * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-       */
       public int getTotalStaticIndexSizeKB() {
         return totalStaticIndexSizeKB_;
       }
-
+      
       // optional int32 totalStaticBloomSizeKB = 11;
       public static final int TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER = 11;
       private int totalStaticBloomSizeKB_;
-      /**
-       * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-       */
       public boolean hasTotalStaticBloomSizeKB() {
         return ((bitField0_ & 0x00000400) == 0x00000400);
       }
-      /**
-       * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-       */
       public int getTotalStaticBloomSizeKB() {
         return totalStaticBloomSizeKB_;
       }
-
+      
       // optional int64 totalCompactingKVs = 12;
       public static final int TOTALCOMPACTINGKVS_FIELD_NUMBER = 12;
       private long totalCompactingKVs_;
-      /**
-       * <code>optional int64 totalCompactingKVs = 12;</code>
-       */
       public boolean hasTotalCompactingKVs() {
         return ((bitField0_ & 0x00000800) == 0x00000800);
       }
-      /**
-       * <code>optional int64 totalCompactingKVs = 12;</code>
-       */
       public long getTotalCompactingKVs() {
         return totalCompactingKVs_;
       }
-
+      
       // optional int64 currentCompactedKVs = 13;
       public static final int CURRENTCOMPACTEDKVS_FIELD_NUMBER = 13;
       private long currentCompactedKVs_;
-      /**
-       * <code>optional int64 currentCompactedKVs = 13;</code>
-       */
       public boolean hasCurrentCompactedKVs() {
         return ((bitField0_ & 0x00001000) == 0x00001000);
       }
-      /**
-       * <code>optional int64 currentCompactedKVs = 13;</code>
-       */
       public long getCurrentCompactedKVs() {
         return currentCompactedKVs_;
       }
-
+      
       private void initFields() {
         name_ = com.google.protobuf.ByteString.EMPTY;
         stores_ = 0;
@@ -757,7 +299,7 @@ public final class StorageClusterStatusM
       public final boolean isInitialized() {
         byte isInitialized = memoizedIsInitialized;
         if (isInitialized != -1) return isInitialized == 1;
-
+        
         if (!hasName()) {
           memoizedIsInitialized = 0;
           return false;
@@ -765,7 +307,7 @@ public final class StorageClusterStatusM
         memoizedIsInitialized = 1;
         return true;
       }
-
+      
       public void writeTo(com.google.protobuf.CodedOutputStream output)
                           throws java.io.IOException {
         getSerializedSize();
@@ -810,12 +352,12 @@ public final class StorageClusterStatusM
         }
         getUnknownFields().writeTo(output);
       }
-
+      
       private int memoizedSerializedSize = -1;
       public int getSerializedSize() {
         int size = memoizedSerializedSize;
         if (size != -1) return size;
-
+      
         size = 0;
         if (((bitField0_ & 0x00000001) == 0x00000001)) {
           size += com.google.protobuf.CodedOutputStream
@@ -873,83 +415,94 @@ public final class StorageClusterStatusM
         memoizedSerializedSize = size;
         return size;
       }
-
+      
       private static final long serialVersionUID = 0L;
       @java.lang.Override
       protected java.lang.Object writeReplace()
           throws java.io.ObjectStreamException {
         return super.writeReplace();
       }
-
+      
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
           com.google.protobuf.ByteString data)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
+        return newBuilder().mergeFrom(data).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
           com.google.protobuf.ByteString data,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
+        return newBuilder().mergeFrom(data, extensionRegistry)
+                 .buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(byte[] data)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
+        return newBuilder().mergeFrom(data).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
           byte[] data,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
+        return newBuilder().mergeFrom(data, extensionRegistry)
+                 .buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(java.io.InputStream input)
           throws java.io.IOException {
-        return PARSER.parseFrom(input);
+        return newBuilder().mergeFrom(input).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
           java.io.InputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        return PARSER.parseFrom(input, extensionRegistry);
+        return newBuilder().mergeFrom(input, extensionRegistry)
+                 .buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(java.io.InputStream input)
           throws java.io.IOException {
-        return PARSER.parseDelimitedFrom(input);
+        Builder builder = newBuilder();
+        if (builder.mergeDelimitedFrom(input)) {
+          return builder.buildParsed();
+        } else {
+          return null;
+        }
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(
           java.io.InputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        return PARSER.parseDelimitedFrom(input, extensionRegistry);
+        Builder builder = newBuilder();
+        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+          return builder.buildParsed();
+        } else {
+          return null;
+        }
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
           com.google.protobuf.CodedInputStream input)
           throws java.io.IOException {
-        return PARSER.parseFrom(input);
+        return newBuilder().mergeFrom(input).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        return PARSER.parseFrom(input, extensionRegistry);
+        return newBuilder().mergeFrom(input, extensionRegistry)
+                 .buildParsed();
       }
-
+      
       public static Builder newBuilder() { return Builder.create(); }
       public Builder newBuilderForType() { return newBuilder(); }
       public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region prototype) {
         return newBuilder().mergeFrom(prototype);
       }
       public Builder toBuilder() { return newBuilder(this); }
-
+      
       @java.lang.Override
       protected Builder newBuilderForType(
           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         Builder builder = new Builder(parent);
         return builder;
       }
-      /**
-       * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region}
-       */
       public static final class Builder extends
           com.google.protobuf.GeneratedMessage.Builder<Builder>
          implements org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.RegionOrBuilder {
@@ -957,21 +510,18 @@ public final class StorageClusterStatusM
             getDescriptor() {
           return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
         }
-
+        
         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
             internalGetFieldAccessorTable() {
-          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable
-              .ensureFieldAccessorsInitialized(
-                  org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder.class);
+          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
         }
-
+        
         // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder()
         private Builder() {
           maybeForceBuilderInitialization();
         }
-
-        private Builder(
-            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        
+        private Builder(BuilderParent parent) {
           super(parent);
           maybeForceBuilderInitialization();
         }
@@ -982,7 +532,7 @@ public final class StorageClusterStatusM
         private static Builder create() {
           return new Builder();
         }
-
+        
         public Builder clear() {
           super.clear();
           name_ = com.google.protobuf.ByteString.EMPTY;
@@ -1013,20 +563,20 @@ public final class StorageClusterStatusM
           bitField0_ = (bitField0_ & ~0x00001000);
           return this;
         }
-
+        
         public Builder clone() {
           return create().mergeFrom(buildPartial());
         }
-
+        
         public com.google.protobuf.Descriptors.Descriptor
             getDescriptorForType() {
-          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
+          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDescriptor();
         }
-
+        
         public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getDefaultInstanceForType() {
           return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance();
         }
-
+        
         public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region build() {
           org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result = buildPartial();
           if (!result.isInitialized()) {
@@ -1034,7 +584,17 @@ public final class StorageClusterStatusM
           }
           return result;
         }
-
+        
+        private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildParsed()
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result = buildPartial();
+          if (!result.isInitialized()) {
+            throw newUninitializedMessageException(
+              result).asInvalidProtocolBufferException();
+          }
+          return result;
+        }
+        
         public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildPartial() {
           org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region(this);
           int from_bitField0_ = bitField0_;
@@ -1095,7 +655,7 @@ public final class StorageClusterStatusM
           onBuilt();
           return result;
         }
-
+        
         public Builder mergeFrom(com.google.protobuf.Message other) {
           if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region) {
             return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region)other);
@@ -1104,7 +664,7 @@ public final class StorageClusterStatusM
             return this;
           }
         }
-
+        
         public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region other) {
           if (other == org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance()) return this;
           if (other.hasName()) {
@@ -1149,7 +709,7 @@ public final class StorageClusterStatusM
           this.mergeUnknownFields(other.getUnknownFields());
           return this;
         }
-
+        
         public final boolean isInitialized() {
           if (!hasName()) {
             
@@ -1157,43 +717,109 @@ public final class StorageClusterStatusM
           }
           return true;
         }
-
+        
         public Builder mergeFrom(
             com.google.protobuf.CodedInputStream input,
             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
             throws java.io.IOException {
-          org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parsedMessage = null;
-          try {
-            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-            parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region) e.getUnfinishedMessage();
-            throw e;
-          } finally {
-            if (parsedMessage != null) {
-              mergeFrom(parsedMessage);
+          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder(
+              this.getUnknownFields());
+          while (true) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  this.setUnknownFields(unknownFields.build());
+                  onChanged();
+                  return this;
+                }
+                break;
+              }
+              case 10: {
+                bitField0_ |= 0x00000001;
+                name_ = input.readBytes();
+                break;
+              }
+              case 16: {
+                bitField0_ |= 0x00000002;
+                stores_ = input.readInt32();
+                break;
+              }
+              case 24: {
+                bitField0_ |= 0x00000004;
+                storefiles_ = input.readInt32();
+                break;
+              }
+              case 32: {
+                bitField0_ |= 0x00000008;
+                storefileSizeMB_ = input.readInt32();
+                break;
+              }
+              case 40: {
+                bitField0_ |= 0x00000010;
+                memstoreSizeMB_ = input.readInt32();
+                break;
+              }
+              case 48: {
+                bitField0_ |= 0x00000020;
+                storefileIndexSizeMB_ = input.readInt32();
+                break;
+              }
+              case 56: {
+                bitField0_ |= 0x00000040;
+                readRequestsCount_ = input.readInt64();
+                break;
+              }
+              case 64: {
+                bitField0_ |= 0x00000080;
+                writeRequestsCount_ = input.readInt64();
+                break;
+              }
+              case 72: {
+                bitField0_ |= 0x00000100;
+                rootIndexSizeKB_ = input.readInt32();
+                break;
+              }
+              case 80: {
+                bitField0_ |= 0x00000200;
+                totalStaticIndexSizeKB_ = input.readInt32();
+                break;
+              }
+              case 88: {
+                bitField0_ |= 0x00000400;
+                totalStaticBloomSizeKB_ = input.readInt32();
+                break;
+              }
+              case 96: {
+                bitField0_ |= 0x00000800;
+                totalCompactingKVs_ = input.readInt64();
+                break;
+              }
+              case 104: {
+                bitField0_ |= 0x00001000;
+                currentCompactedKVs_ = input.readInt64();
+                break;
+              }
             }
           }
-          return this;
         }
+        
         private int bitField0_;
-
+        
         // required bytes name = 1;
         private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY;
-        /**
-         * <code>required bytes name = 1;</code>
-         */
         public boolean hasName() {
           return ((bitField0_ & 0x00000001) == 0x00000001);
         }
-        /**
-         * <code>required bytes name = 1;</code>
-         */
         public com.google.protobuf.ByteString getName() {
           return name_;
         }
-        /**
-         * <code>required bytes name = 1;</code>
-         */
         public Builder setName(com.google.protobuf.ByteString value) {
           if (value == null) {
     throw new NullPointerException();
@@ -1203,800 +829,431 @@ public final class StorageClusterStatusM
           onChanged();
           return this;
         }
-        /**
-         * <code>required bytes name = 1;</code>
-         */
         public Builder clearName() {
           bitField0_ = (bitField0_ & ~0x00000001);
           name_ = getDefaultInstance().getName();
           onChanged();
           return this;
         }
-
+        
         // optional int32 stores = 2;
         private int stores_ ;
-        /**
-         * <code>optional int32 stores = 2;</code>
-         */
         public boolean hasStores() {
           return ((bitField0_ & 0x00000002) == 0x00000002);
         }
-        /**
-         * <code>optional int32 stores = 2;</code>
-         */
         public int getStores() {
           return stores_;
         }
-        /**
-         * <code>optional int32 stores = 2;</code>
-         */
         public Builder setStores(int value) {
           bitField0_ |= 0x00000002;
           stores_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 stores = 2;</code>
-         */
         public Builder clearStores() {
           bitField0_ = (bitField0_ & ~0x00000002);
           stores_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int32 storefiles = 3;
         private int storefiles_ ;
-        /**
-         * <code>optional int32 storefiles = 3;</code>
-         */
         public boolean hasStorefiles() {
           return ((bitField0_ & 0x00000004) == 0x00000004);
         }
-        /**
-         * <code>optional int32 storefiles = 3;</code>
-         */
         public int getStorefiles() {
           return storefiles_;
         }
-        /**
-         * <code>optional int32 storefiles = 3;</code>
-         */
         public Builder setStorefiles(int value) {
           bitField0_ |= 0x00000004;
           storefiles_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 storefiles = 3;</code>
-         */
         public Builder clearStorefiles() {
           bitField0_ = (bitField0_ & ~0x00000004);
           storefiles_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int32 storefileSizeMB = 4;
         private int storefileSizeMB_ ;
-        /**
-         * <code>optional int32 storefileSizeMB = 4;</code>
-         */
         public boolean hasStorefileSizeMB() {
           return ((bitField0_ & 0x00000008) == 0x00000008);
         }
-        /**
-         * <code>optional int32 storefileSizeMB = 4;</code>
-         */
         public int getStorefileSizeMB() {
           return storefileSizeMB_;
         }
-        /**
-         * <code>optional int32 storefileSizeMB = 4;</code>
-         */
         public Builder setStorefileSizeMB(int value) {
           bitField0_ |= 0x00000008;
           storefileSizeMB_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 storefileSizeMB = 4;</code>
-         */
         public Builder clearStorefileSizeMB() {
           bitField0_ = (bitField0_ & ~0x00000008);
           storefileSizeMB_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int32 memstoreSizeMB = 5;
         private int memstoreSizeMB_ ;
-        /**
-         * <code>optional int32 memstoreSizeMB = 5;</code>
-         */
         public boolean hasMemstoreSizeMB() {
           return ((bitField0_ & 0x00000010) == 0x00000010);
         }
-        /**
-         * <code>optional int32 memstoreSizeMB = 5;</code>
-         */
         public int getMemstoreSizeMB() {
           return memstoreSizeMB_;
         }
-        /**
-         * <code>optional int32 memstoreSizeMB = 5;</code>
-         */
         public Builder setMemstoreSizeMB(int value) {
           bitField0_ |= 0x00000010;
           memstoreSizeMB_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 memstoreSizeMB = 5;</code>
-         */
         public Builder clearMemstoreSizeMB() {
           bitField0_ = (bitField0_ & ~0x00000010);
           memstoreSizeMB_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int32 storefileIndexSizeMB = 6;
         private int storefileIndexSizeMB_ ;
-        /**
-         * <code>optional int32 storefileIndexSizeMB = 6;</code>
-         */
         public boolean hasStorefileIndexSizeMB() {
           return ((bitField0_ & 0x00000020) == 0x00000020);
         }
-        /**
-         * <code>optional int32 storefileIndexSizeMB = 6;</code>
-         */
         public int getStorefileIndexSizeMB() {
           return storefileIndexSizeMB_;
         }
-        /**
-         * <code>optional int32 storefileIndexSizeMB = 6;</code>
-         */
         public Builder setStorefileIndexSizeMB(int value) {
           bitField0_ |= 0x00000020;
           storefileIndexSizeMB_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 storefileIndexSizeMB = 6;</code>
-         */
         public Builder clearStorefileIndexSizeMB() {
           bitField0_ = (bitField0_ & ~0x00000020);
           storefileIndexSizeMB_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int64 readRequestsCount = 7;
         private long readRequestsCount_ ;
-        /**
-         * <code>optional int64 readRequestsCount = 7;</code>
-         */
         public boolean hasReadRequestsCount() {
           return ((bitField0_ & 0x00000040) == 0x00000040);
         }
-        /**
-         * <code>optional int64 readRequestsCount = 7;</code>
-         */
         public long getReadRequestsCount() {
           return readRequestsCount_;
         }
-        /**
-         * <code>optional int64 readRequestsCount = 7;</code>
-         */
         public Builder setReadRequestsCount(long value) {
           bitField0_ |= 0x00000040;
           readRequestsCount_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int64 readRequestsCount = 7;</code>
-         */
         public Builder clearReadRequestsCount() {
           bitField0_ = (bitField0_ & ~0x00000040);
           readRequestsCount_ = 0L;
           onChanged();
           return this;
         }
-
+        
         // optional int64 writeRequestsCount = 8;
         private long writeRequestsCount_ ;
-        /**
-         * <code>optional int64 writeRequestsCount = 8;</code>
-         */
         public boolean hasWriteRequestsCount() {
           return ((bitField0_ & 0x00000080) == 0x00000080);
         }
-        /**
-         * <code>optional int64 writeRequestsCount = 8;</code>
-         */
         public long getWriteRequestsCount() {
           return writeRequestsCount_;
         }
-        /**
-         * <code>optional int64 writeRequestsCount = 8;</code>
-         */
         public Builder setWriteRequestsCount(long value) {
           bitField0_ |= 0x00000080;
           writeRequestsCount_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int64 writeRequestsCount = 8;</code>
-         */
         public Builder clearWriteRequestsCount() {
           bitField0_ = (bitField0_ & ~0x00000080);
           writeRequestsCount_ = 0L;
           onChanged();
           return this;
         }
-
+        
         // optional int32 rootIndexSizeKB = 9;
         private int rootIndexSizeKB_ ;
-        /**
-         * <code>optional int32 rootIndexSizeKB = 9;</code>
-         */
         public boolean hasRootIndexSizeKB() {
           return ((bitField0_ & 0x00000100) == 0x00000100);
         }
-        /**
-         * <code>optional int32 rootIndexSizeKB = 9;</code>
-         */
         public int getRootIndexSizeKB() {
           return rootIndexSizeKB_;
         }
-        /**
-         * <code>optional int32 rootIndexSizeKB = 9;</code>
-         */
         public Builder setRootIndexSizeKB(int value) {
           bitField0_ |= 0x00000100;
           rootIndexSizeKB_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 rootIndexSizeKB = 9;</code>
-         */
         public Builder clearRootIndexSizeKB() {
           bitField0_ = (bitField0_ & ~0x00000100);
           rootIndexSizeKB_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int32 totalStaticIndexSizeKB = 10;
         private int totalStaticIndexSizeKB_ ;
-        /**
-         * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-         */
         public boolean hasTotalStaticIndexSizeKB() {
           return ((bitField0_ & 0x00000200) == 0x00000200);
         }
-        /**
-         * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-         */
         public int getTotalStaticIndexSizeKB() {
           return totalStaticIndexSizeKB_;
         }
-        /**
-         * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-         */
         public Builder setTotalStaticIndexSizeKB(int value) {
           bitField0_ |= 0x00000200;
           totalStaticIndexSizeKB_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 totalStaticIndexSizeKB = 10;</code>
-         */
         public Builder clearTotalStaticIndexSizeKB() {
           bitField0_ = (bitField0_ & ~0x00000200);
           totalStaticIndexSizeKB_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int32 totalStaticBloomSizeKB = 11;
         private int totalStaticBloomSizeKB_ ;
-        /**
-         * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-         */
         public boolean hasTotalStaticBloomSizeKB() {
           return ((bitField0_ & 0x00000400) == 0x00000400);
         }
-        /**
-         * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-         */
         public int getTotalStaticBloomSizeKB() {
           return totalStaticBloomSizeKB_;
         }
-        /**
-         * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-         */
         public Builder setTotalStaticBloomSizeKB(int value) {
           bitField0_ |= 0x00000400;
           totalStaticBloomSizeKB_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int32 totalStaticBloomSizeKB = 11;</code>
-         */
         public Builder clearTotalStaticBloomSizeKB() {
           bitField0_ = (bitField0_ & ~0x00000400);
           totalStaticBloomSizeKB_ = 0;
           onChanged();
           return this;
         }
-
+        
         // optional int64 totalCompactingKVs = 12;
         private long totalCompactingKVs_ ;
-        /**
-         * <code>optional int64 totalCompactingKVs = 12;</code>
-         */
         public boolean hasTotalCompactingKVs() {
           return ((bitField0_ & 0x00000800) == 0x00000800);
         }
-        /**
-         * <code>optional int64 totalCompactingKVs = 12;</code>
-         */
         public long getTotalCompactingKVs() {
           return totalCompactingKVs_;
         }
-        /**
-         * <code>optional int64 totalCompactingKVs = 12;</code>
-         */
         public Builder setTotalCompactingKVs(long value) {
           bitField0_ |= 0x00000800;
           totalCompactingKVs_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int64 totalCompactingKVs = 12;</code>
-         */
         public Builder clearTotalCompactingKVs() {
           bitField0_ = (bitField0_ & ~0x00000800);
           totalCompactingKVs_ = 0L;
           onChanged();
           return this;
         }
-
+        
         // optional int64 currentCompactedKVs = 13;
         private long currentCompactedKVs_ ;
-        /**
-         * <code>optional int64 currentCompactedKVs = 13;</code>
-         */
         public boolean hasCurrentCompactedKVs() {
           return ((bitField0_ & 0x00001000) == 0x00001000);
         }
-        /**
-         * <code>optional int64 currentCompactedKVs = 13;</code>
-         */
         public long getCurrentCompactedKVs() {
           return currentCompactedKVs_;
         }
-        /**
-         * <code>optional int64 currentCompactedKVs = 13;</code>
-         */
         public Builder setCurrentCompactedKVs(long value) {
           bitField0_ |= 0x00001000;
           currentCompactedKVs_ = value;
           onChanged();
           return this;
         }
-        /**
-         * <code>optional int64 currentCompactedKVs = 13;</code>
-         */
         public Builder clearCurrentCompactedKVs() {
           bitField0_ = (bitField0_ & ~0x00001000);
           currentCompactedKVs_ = 0L;
           onChanged();
           return this;
         }
-
+        
         // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region)
       }
-
+      
       static {
         defaultInstance = new Region(true);
         defaultInstance.initFields();
       }
-
+      
       // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region)
     }
-
+    
     public interface NodeOrBuilder
         extends com.google.protobuf.MessageOrBuilder {
-
+      
       // required string name = 1;
-      /**
-       * <code>required string name = 1;</code>
-       *
-       * <pre>
-       * name:port
-       * </pre>
-       */
       boolean hasName();
-      /**
-       * <code>required string name = 1;</code>
-       *
-       * <pre>
-       * name:port
-       * </pre>
-       */
-      java.lang.String getName();
-      /**
-       * <code>required string name = 1;</code>
-       *
-       * <pre>
-       * name:port
-       * </pre>
-       */
-      com.google.protobuf.ByteString
-          getNameBytes();
-
+      String getName();
+      
       // optional int64 startCode = 2;
-      /**
-       * <code>optional int64 startCode = 2;</code>
-       */
       boolean hasStartCode();
-      /**
-       * <code>optional int64 startCode = 2;</code>
-       */
       long getStartCode();
-
+      
       // optional int32 requests = 3;
-      /**
-       * <code>optional int32 requests = 3;</code>
-       */
       boolean hasRequests();
-      /**
-       * <code>optional int32 requests = 3;</code>
-       */
       int getRequests();
-
+      
       // optional int32 heapSizeMB = 4;
-      /**
-       * <code>optional int32 heapSizeMB = 4;</code>
-       */
       boolean hasHeapSizeMB();
-      /**
-       * <code>optional int32 heapSizeMB = 4;</code>
-       */
       int getHeapSizeMB();
-
+      
       // optional int32 maxHeapSizeMB = 5;
-      /**
-       * <code>optional int32 maxHeapSizeMB = 5;</code>
-       */
       boolean hasMaxHeapSizeMB();
-      /**
-       * <code>optional int32 maxHeapSizeMB = 5;</code>
-       */
       int getMaxHeapSizeMB();
-
+      
       // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> 
           getRegionsList();
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getRegions(int index);
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       int getRegionsCount();
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.RegionOrBuilder> 
           getRegionsOrBuilderList();
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.RegionOrBuilder getRegionsOrBuilder(
           int index);
     }
-    /**
-     * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node}
-     */
     public static final class Node extends
         com.google.protobuf.GeneratedMessage
         implements NodeOrBuilder {
       // Use Node.newBuilder() to construct.
-      private Node(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      private Node(Builder builder) {
         super(builder);
-        this.unknownFields = builder.getUnknownFields();
       }
-      private Node(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+      private Node(boolean noInit) {}
+      
       private static final Node defaultInstance;
       public static Node getDefaultInstance() {
         return defaultInstance;
       }
-
+      
       public Node getDefaultInstanceForType() {
         return defaultInstance;
       }
-
-      private final com.google.protobuf.UnknownFieldSet unknownFields;
-      @java.lang.Override
-      public final com.google.protobuf.UnknownFieldSet
-          getUnknownFields() {
-        return this.unknownFields;
-      }
-      private Node(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        initFields();
-        int mutable_bitField0_ = 0;
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder();
-        try {
-          boolean done = false;
-          while (!done) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                done = true;
-                break;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  done = true;
-                }
-                break;
-              }
-              case 10: {
-                bitField0_ |= 0x00000001;
-                name_ = input.readBytes();
-                break;
-              }
-              case 16: {
-                bitField0_ |= 0x00000002;
-                startCode_ = input.readInt64();
-                break;
-              }
-              case 24: {
-                bitField0_ |= 0x00000004;
-                requests_ = input.readInt32();
-                break;
-              }
-              case 32: {
-                bitField0_ |= 0x00000008;
-                heapSizeMB_ = input.readInt32();
-                break;
-              }
-              case 40: {
-                bitField0_ |= 0x00000010;
-                maxHeapSizeMB_ = input.readInt32();
-                break;
-              }
-              case 50: {
-                if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
-                  regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
-                  mutable_bitField0_ |= 0x00000020;
-                }
-                regions_.add(input.readMessage(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.PARSER, extensionRegistry));
-                break;
-              }
-            }
-          }
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          throw e.setUnfinishedMessage(this);
-        } catch (java.io.IOException e) {
-          throw new com.google.protobuf.InvalidProtocolBufferException(
-              e.getMessage()).setUnfinishedMessage(this);
-        } finally {
-          if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
-            regions_ = java.util.Collections.unmodifiableList(regions_);
-          }
-          this.unknownFields = unknownFields.build();
-          makeExtensionsImmutable();
-        }
-      }
+      
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
       }
-
+      
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder.class);
-      }
-
-      public static com.google.protobuf.Parser<Node> PARSER =
-          new com.google.protobuf.AbstractParser<Node>() {
-        public Node parsePartialFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          return new Node(input, extensionRegistry);
-        }
-      };
-
-      @java.lang.Override
-      public com.google.protobuf.Parser<Node> getParserForType() {
-        return PARSER;
+        return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
       }
-
+      
       private int bitField0_;
       // required string name = 1;
       public static final int NAME_FIELD_NUMBER = 1;
       private java.lang.Object name_;
-      /**
-       * <code>required string name = 1;</code>
-       *
-       * <pre>
-       * name:port
-       * </pre>
-       */
       public boolean hasName() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      /**
-       * <code>required string name = 1;</code>
-       *
-       * <pre>
-       * name:port
-       * </pre>
-       */
-      public java.lang.String getName() {
+      public String getName() {
         java.lang.Object ref = name_;
-        if (ref instanceof java.lang.String) {
-          return (java.lang.String) ref;
+        if (ref instanceof String) {
+          return (String) ref;
         } else {
           com.google.protobuf.ByteString bs = 
               (com.google.protobuf.ByteString) ref;
-          java.lang.String s = bs.toStringUtf8();
-          if (bs.isValidUtf8()) {
+          String s = bs.toStringUtf8();
+          if (com.google.protobuf.Internal.isValidUtf8(bs)) {
             name_ = s;
           }
           return s;
         }
       }
-      /**
-       * <code>required string name = 1;</code>
-       *
-       * <pre>
-       * name:port
-       * </pre>
-       */
-      public com.google.protobuf.ByteString
-          getNameBytes() {
+      private com.google.protobuf.ByteString getNameBytes() {
         java.lang.Object ref = name_;
-        if (ref instanceof java.lang.String) {
+        if (ref instanceof String) {
           com.google.protobuf.ByteString b = 
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
+              com.google.protobuf.ByteString.copyFromUtf8((String) ref);
           name_ = b;
           return b;
         } else {
           return (com.google.protobuf.ByteString) ref;
         }
       }
-
+      
       // optional int64 startCode = 2;
       public static final int STARTCODE_FIELD_NUMBER = 2;
       private long startCode_;
-      /**
-       * <code>optional int64 startCode = 2;</code>
-       */
       public boolean hasStartCode() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
-      /**
-       * <code>optional int64 startCode = 2;</code>
-       */
       public long getStartCode() {
         return startCode_;
       }
-
+      
       // optional int32 requests = 3;
       public static final int REQUESTS_FIELD_NUMBER = 3;
       private int requests_;
-      /**
-       * <code>optional int32 requests = 3;</code>
-       */
       public boolean hasRequests() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
-      /**
-       * <code>optional int32 requests = 3;</code>
-       */
       public int getRequests() {
         return requests_;
       }
-
+      
       // optional int32 heapSizeMB = 4;
       public static final int HEAPSIZEMB_FIELD_NUMBER = 4;
       private int heapSizeMB_;
-      /**
-       * <code>optional int32 heapSizeMB = 4;</code>
-       */
       public boolean hasHeapSizeMB() {
         return ((bitField0_ & 0x00000008) == 0x00000008);
       }
-      /**
-       * <code>optional int32 heapSizeMB = 4;</code>
-       */
       public int getHeapSizeMB() {
         return heapSizeMB_;
       }
-
+      
       // optional int32 maxHeapSizeMB = 5;
       public static final int MAXHEAPSIZEMB_FIELD_NUMBER = 5;
       private int maxHeapSizeMB_;
-      /**
-       * <code>optional int32 maxHeapSizeMB = 5;</code>
-       */
       public boolean hasMaxHeapSizeMB() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
       }
-      /**
-       * <code>optional int32 maxHeapSizeMB = 5;</code>
-       */
       public int getMaxHeapSizeMB() {
         return maxHeapSizeMB_;
       }
-
+      
       // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;
       public static final int REGIONS_FIELD_NUMBER = 6;
       private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> regions_;
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> getRegionsList() {
         return regions_;
       }
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       public java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.RegionOrBuilder> 
           getRegionsOrBuilderList() {
         return regions_;
       }
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       public int getRegionsCount() {
         return regions_.size();
       }
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getRegions(int index) {
         return regions_.get(index);
       }
-      /**
-       * <code>repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;</code>
-       */
       public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.RegionOrBuilder getRegionsOrBuilder(
           int index) {
         return regions_.get(index);
       }
-
+      
       private void initFields() {
         name_ = "";
         startCode_ = 0L;
@@ -2009,7 +1266,7 @@ public final class StorageClusterStatusM
       public final boolean isInitialized() {
         byte isInitialized = memoizedIsInitialized;
         if (isInitialized != -1) return isInitialized == 1;
-
+        
         if (!hasName()) {
           memoizedIsInitialized = 0;
           return false;
@@ -2023,7 +1280,7 @@ public final class StorageClusterStatusM
         memoizedIsInitialized = 1;
         return true;
       }
-
+      
       public void writeTo(com.google.protobuf.CodedOutputStream output)
                           throws java.io.IOException {
         getSerializedSize();
@@ -2047,12 +1304,12 @@ public final class StorageClusterStatusM
         }
         getUnknownFields().writeTo(output);
       }
-
+      
       private int memoizedSerializedSize = -1;
       public int getSerializedSize() {
         int size = memoizedSerializedSize;
         if (size != -1) return size;
-
+      
         size = 0;
         if (((bitField0_ & 0x00000001) == 0x00000001)) {
           size += com.google.protobuf.CodedOutputStream
@@ -2082,83 +1339,94 @@ public final class StorageClusterStatusM
         memoizedSerializedSize = size;
         return size;
       }
-
+      
       private static final long serialVersionUID = 0L;
       @java.lang.Override
       protected java.lang.Object writeReplace()
           throws java.io.ObjectStreamException {
         return super.writeReplace();
       }
-
+      
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
           com.google.protobuf.ByteString data)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
+        return newBuilder().mergeFrom(data).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
           com.google.protobuf.ByteString data,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
+        return newBuilder().mergeFrom(data, extensionRegistry)
+                 .buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(byte[] data)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
+        return newBuilder().mergeFrom(data).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
           byte[] data,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
+        return newBuilder().mergeFrom(data, extensionRegistry)
+                 .buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(java.io.InputStream input)
           throws java.io.IOException {
-        return PARSER.parseFrom(input);
+        return newBuilder().mergeFrom(input).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
           java.io.InputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        return PARSER.parseFrom(input, extensionRegistry);
+        return newBuilder().mergeFrom(input, extensionRegistry)
+                 .buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(java.io.InputStream input)
           throws java.io.IOException {
-        return PARSER.parseDelimitedFrom(input);
+        Builder builder = newBuilder();
+        if (builder.mergeDelimitedFrom(input)) {
+          return builder.buildParsed();
+        } else {
+          return null;
+        }
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(
           java.io.InputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        return PARSER.parseDelimitedFrom(input, extensionRegistry);
+        Builder builder = newBuilder();
+        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+          return builder.buildParsed();
+        } else {
+          return null;
+        }
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
           com.google.protobuf.CodedInputStream input)
           throws java.io.IOException {
-        return PARSER.parseFrom(input);
+        return newBuilder().mergeFrom(input).buildParsed();
       }
       public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        return PARSER.parseFrom(input, extensionRegistry);
+        return newBuilder().mergeFrom(input, extensionRegistry)
+                 .buildParsed();
       }
-
+      
       public static Builder newBuilder() { return Builder.create(); }
       public Builder newBuilderForType() { return newBuilder(); }
       public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node prototype) {
         return newBuilder().mergeFrom(prototype);
       }
       public Builder toBuilder() { return newBuilder(this); }
-
+      
       @java.lang.Override
       protected Builder newBuilderForType(
           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         Builder builder = new Builder(parent);
         return builder;
       }
-      /**
-       * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node}
-       */
       public static final class Builder extends
           com.google.protobuf.GeneratedMessage.Builder<Builder>
          implements org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder {
@@ -2166,21 +1434,18 @@ public final class StorageClusterStatusM
             getDescriptor() {
           return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
         }
-
+        
         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
             internalGetFieldAccessorTable() {
-          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable
-              .ensureFieldAccessorsInitialized(
-                  org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder.class);
+          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
         }
-
+        
         // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.newBuilder()
         private Builder() {
           maybeForceBuilderInitialization();
         }
-
-        private Builder(
-            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        
+        private Builder(BuilderParent parent) {
           super(parent);
           maybeForceBuilderInitialization();
         }
@@ -2192,7 +1457,7 @@ public final class StorageClusterStatusM
         private static Builder create() {
           return new Builder();
         }
-
+        
         public Builder clear() {
           super.clear();
           name_ = "";
@@ -2213,20 +1478,20 @@ public final class StorageClusterStatusM
           }
           return this;
         }
-
+        
         public Builder clone() {
           return create().mergeFrom(buildPartial());
         }
-
+        
         public com.google.protobuf.Descriptors.Descriptor
             getDescriptorForType() {
-          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
+          return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDescriptor();
         }
-
+        
         public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getDefaultInstanceForType() {
           return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance();
         }
-
+        
         public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node build() {
           org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result = buildPartial();
           if (!result.isInitialized()) {
@@ -2234,7 +1499,17 @@ public final class StorageClusterStatusM
           }
           return result;
         }
-
+        
+        private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildParsed()
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result = buildPartial();
+          if (!result.isInitialized()) {
+            throw newUninitializedMessageException(
+              result).asInvalidProtocolBufferException();
+          }
+          return result;
+        }
+        
         public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildPartial() {
           org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node(this);
           int from_bitField0_ = bitField0_;
@@ -2272,7 +1547,7 @@ public final class StorageClusterStatusM
           onBuilt();
           return result;
         }
-
+        
         public Builder mergeFrom(com.google.protobuf.Message other) {
           if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node) {
             return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node)other);
@@ -2281,13 +1556,11 @@ public final class StorageClusterStatusM
             return this;
           }
         }
-
+        
         public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node other) {
           if (other == org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance()) return this;
           if (other.hasName()) {
-            bitField0_ |= 0x00000001;
-            name_ = other.name_;
-            onChanged();
+            setName(other.getName());
           }
           if (other.hasStartCode()) {
             setStartCode(other.getStartCode());
@@ -2330,7 +1603,7 @@ public final class StorageClusterStatusM
           this.mergeUnknownFields(other.getUnknownFields());
           return this;
         }
-
+        
         public final boolean isInitialized() {
           if (!hasName()) {
             
@@ -2344,85 +1617,83 @@ public final class StorageClusterStatusM
           }
           return true;
         }
-
+        
         public Builder mergeFrom(
             com.google.protobuf.CodedInputStream input,
             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
             throws java.io.IOException {
-          org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parsedMessage = null;
-          try {
-            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-            parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node) e.getUnfinishedMessage();
-            throw e;
-          } finally {
-            if (parsedMessage != null) {
-              mergeFrom(parsedMessage);
+          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder(
+              this.getUnknownFields());
+          while (true) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  this.setUnknownFields(unknownFields.build());
+                  onChanged();
+                  return this;
+                }
+                break;
+              }
+              case 10: {
+                bitField0_ |= 0x00000001;
+                name_ = input.readBytes();
+                break;
+              }
+              case 16: {
+                bitField0_ |= 0x00000002;
+                startCode_ = input.readInt64();
+                break;
+              }
+              case 24: {
+                bitField0_ |= 0x00000004;
+                requests_ = input.readInt32();
+                break;
+              }
+              case 32: {
+                bitField0_ |= 0x00000008;
+                heapSizeMB_ = input.readInt32();
+                break;
+              }
+              case 40: {
+                bitField0_ |= 0x00000010;
+                maxHeapSizeMB_ = input.readInt32();
+                break;
+              }
+              case 50: {
+                org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder();
+                input.readMessage(subBuilder, extensionRegistry);
+                addRegions(subBuilder.buildPartial());
+                break;
+              }
             }
           }
-          return this;
         }
+        
         private int bitField0_;
-
+        
         // required string name = 1;
         private java.lang.Object name_ = "";
-        /**
-         * <code>required string name = 1;</code>
-         *
-         * <pre>
-         * name:port
-         * </pre>
-         */
         public boolean hasName() {
           return ((bitField0_ & 0x00000001) == 0x00000001);
         }
-        /**
-         * <code>required string name = 1;</code>
-         *
-         * <pre>
-         * name:port
-         * </pre>
-         */
-        public java.lang.String getName() {
+        public String getName() {
           java.lang.Object ref = name_;
-          if (!(ref instanceof java.lang.String)) {
-            java.lang.String s = ((com.google.protobuf.ByteString) ref)
-                .toStringUtf8();
+          if (!(ref instanceof String)) {
+            String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
             name_ = s;
             return s;
           } else {
-            return (java.lang.String) ref;
-          }
-        }
-        /**
-         * <code>required string name = 1;</code>
-         *
-         * <pre>
-         * name:port
-         * </pre>
-         */
-        public com.google.protobuf.ByteString
-            getNameBytes() {
-          java.lang.Object ref = name_;
-          if (ref instanceof String) {
-            com.google.protobuf.ByteString b = 
-                com.google.protobuf.ByteString.copyFromUtf8(
-                    (java.lang.String) ref);
-            name_ = b;
-            return b;
-          } else {
-            return (com.google.protobuf.ByteString) ref;
+            return (String) ref;
           }
         }
-        /**
-         * <code>required string name = 1;</code>
-         *
-         * <pre>

[... 1401 lines stripped ...]


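For readers skimming the hunks above: the regeneration replaces the PARSER-based parseFrom implementations with builder-based ones (newBuilder().mergeFrom(...).buildParsed()) and drops the per-field javadoc, but the static parseFrom(byte[]) overload appears on both sides of the diff. Below is a minimal caller-side sketch of decoding a Node through that entry point; the byte[] source is a hypothetical placeholder, not something taken from this commit.

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;

    public class NodeParseSketch {
      // Decodes a serialized Node and reads a few of the fields visible in the hunks above.
      // Where the serializedNode bytes come from (e.g. the REST gateway) is assumed, not shown here.
      public static void describe(byte[] serializedNode) throws InvalidProtocolBufferException {
        StorageClusterStatus.Node node = StorageClusterStatus.Node.parseFrom(serializedNode);
        if (node.hasName()) {
          System.out.println("node:     " + node.getName());        // required string name = 1 (name:port)
        }
        System.out.println("requests: " + node.getRequests());      // optional int32 requests = 3
        System.out.println("heap MB:  " + node.getHeapSizeMB()
            + " / " + node.getMaxHeapSizeMB());                     // heapSizeMB = 4, maxHeapSizeMB = 5
        System.out.println("regions:  " + node.getRegionsCount());  // repeated Region regions = 6
      }
    }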