hbase-commits mailing list archives

From: apurt...@apache.org
Subject: svn commit: r789136 [4/8] - in /hadoop/hbase/trunk/src/contrib: ./ stargate/ stargate/lib/ stargate/src/ stargate/src/java/ stargate/src/java/org/ stargate/src/java/org/apache/ stargate/src/java/org/apache/hadoop/ stargate/src/java/org/apache/hadoop/hb...
Date: Sun, 28 Jun 2009 18:21:49 GMT
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ColumnSchemaMessage.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ColumnSchemaMessage.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ColumnSchemaMessage.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ColumnSchemaMessage.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,861 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+
+package org.apache.hadoop.hbase.stargate.protobuf.generated;
+
+public final class ColumnSchemaMessage {
+  private ColumnSchemaMessage() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public static final class ColumnSchema extends
+      com.google.protobuf.GeneratedMessage {
+    // Use ColumnSchema.newBuilder() to construct.
+    private ColumnSchema() {}
+    
+    private static final ColumnSchema defaultInstance = new ColumnSchema();
+    public static ColumnSchema getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public ColumnSchema getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor;
+    }
+    
+    @Override
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable;
+    }
+    
+    public static final class Attribute extends
+        com.google.protobuf.GeneratedMessage {
+      // Use Attribute.newBuilder() to construct.
+      private Attribute() {}
+      
+      private static final Attribute defaultInstance = new Attribute();
+      public static Attribute getDefaultInstance() {
+        return defaultInstance;
+      }
+      
+      public Attribute getDefaultInstanceForType() {
+        return defaultInstance;
+      }
+      
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor;
+      }
+      
+      @Override
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
+      }
+      
+      // required string name = 1;
+      public static final int NAME_FIELD_NUMBER = 1;
+      private boolean hasName;
+      private java.lang.String name_ = "";
+      public boolean hasName() { return hasName; }
+      public java.lang.String getName() { return name_; }
+      
+      // required string value = 2;
+      public static final int VALUE_FIELD_NUMBER = 2;
+      private boolean hasValue;
+      private java.lang.String value_ = "";
+      public boolean hasValue() { return hasValue; }
+      public java.lang.String getValue() { return value_; }
+      
+      @Override
+      public final boolean isInitialized() {
+        if (!hasName) return false;
+        if (!hasValue) return false;
+        return true;
+      }
+      
+      @Override
+      public void writeTo(com.google.protobuf.CodedOutputStream output)
+                          throws java.io.IOException {
+        if (hasName()) {
+          output.writeString(1, getName());
+        }
+        if (hasValue()) {
+          output.writeString(2, getValue());
+        }
+        getUnknownFields().writeTo(output);
+      }
+      
+      private int memoizedSerializedSize = -1;
+      @Override
+      public int getSerializedSize() {
+        int size = memoizedSerializedSize;
+        if (size != -1) return size;
+      
+        size = 0;
+        if (hasName()) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeStringSize(1, getName());
+        }
+        if (hasValue()) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeStringSize(2, getValue());
+        }
+        size += getUnknownFields().getSerializedSize();
+        memoizedSerializedSize = size;
+        return size;
+      }
+      
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
+          com.google.protobuf.ByteString data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return newBuilder().mergeFrom(data).buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
+          com.google.protobuf.ByteString data,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return newBuilder().mergeFrom(data, extensionRegistry)
+                 .buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(byte[] data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return newBuilder().mergeFrom(data).buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
+          byte[] data,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return newBuilder().mergeFrom(data, extensionRegistry)
+                 .buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return newBuilder().mergeFrom(input).buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws java.io.IOException {
+        return newBuilder().mergeFrom(input, extensionRegistry)
+                 .buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return newBuilder().mergeDelimitedFrom(input).buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws java.io.IOException {
+        return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
+                 .buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return newBuilder().mergeFrom(input).buildParsed();
+      }
+      public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws java.io.IOException {
+        return newBuilder().mergeFrom(input, extensionRegistry)
+                 .buildParsed();
+      }
+      
+      public static Builder newBuilder() { return new Builder(); }
+      public Builder newBuilderForType() { return new Builder(); }
+      public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute prototype) {
+        return new Builder().mergeFrom(prototype);
+      }
+      public Builder toBuilder() { return newBuilder(this); }
+      
+      public static final class Builder extends
+          com.google.protobuf.GeneratedMessage.Builder<Builder> {
+        // Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder()
+        private Builder() {}
+        
+        org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
+        
+        @Override
+        protected org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute internalGetResult() {
+          return result;
+        }
+        
+        @Override
+        public Builder clear() {
+          result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
+          return this;
+        }
+        
+        @Override
+        public Builder clone() {
+          return new Builder().mergeFrom(result);
+        }
+        
+        @Override
+        public com.google.protobuf.Descriptors.Descriptor
+            getDescriptorForType() {
+          return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDescriptor();
+        }
+        
+        public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getDefaultInstanceForType() {
+          return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance();
+        }
+        
+        public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute build() {
+          if (result != null && !isInitialized()) {
+            throw new com.google.protobuf.UninitializedMessageException(
+              result);
+          }
+          return buildPartial();
+        }
+        
+        private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildParsed()
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          if (!isInitialized()) {
+            throw new com.google.protobuf.UninitializedMessageException(
+              result).asInvalidProtocolBufferException();
+          }
+          return buildPartial();
+        }
+        
+        public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildPartial() {
+          if (result == null) {
+            throw new IllegalStateException(
+              "build() has already been called on this Builder.");  }
+          org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute returnMe = result;
+          result = null;
+          return returnMe;
+        }
+        
+        @Override
+        public Builder mergeFrom(com.google.protobuf.Message other) {
+          if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute) {
+            return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute)other);
+          } else {
+            super.mergeFrom(other);
+            return this;
+          }
+        }
+        
+        public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute other) {
+          if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance()) return this;
+          if (other.hasName()) {
+            setName(other.getName());
+          }
+          if (other.hasValue()) {
+            setValue(other.getValue());
+          }
+          this.mergeUnknownFields(other.getUnknownFields());
+          return this;
+        }
+        
+        @Override
+        public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input)
+            throws java.io.IOException {
+          return mergeFrom(input,
+            com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
+        }
+        
+        @Override
+        public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistry extensionRegistry)
+            throws java.io.IOException {
+          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder(
+              this.getUnknownFields());
+          while (true) {
+            int tag = input.readTag();
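+            // readTag() returns (field_number << 3) | wire_type, so tag 10
+            // is field 1 (name, length-delimited) and tag 18 is field 2 (value).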
+            switch (tag) {
+              case 0:
+                this.setUnknownFields(unknownFields.build());
+                return this;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  this.setUnknownFields(unknownFields.build());
+                  return this;
+                }
+                break;
+              }
+              case 10: {
+                setName(input.readString());
+                break;
+              }
+              case 18: {
+                setValue(input.readString());
+                break;
+              }
+            }
+          }
+        }
+        
+        
+        // required string name = 1;
+        public boolean hasName() {
+          return result.hasName();
+        }
+        public java.lang.String getName() {
+          return result.getName();
+        }
+        public Builder setName(java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          result.hasName = true;
+          result.name_ = value;
+          return this;
+        }
+        public Builder clearName() {
+          result.hasName = false;
+          result.name_ = "";
+          return this;
+        }
+        
+        // required string value = 2;
+        public boolean hasValue() {
+          return result.hasValue();
+        }
+        public java.lang.String getValue() {
+          return result.getValue();
+        }
+        public Builder setValue(java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          result.hasValue = true;
+          result.value_ = value;
+          return this;
+        }
+        public Builder clearValue() {
+          result.hasValue = false;
+          result.value_ = "";
+          return this;
+        }
+      }
+      
+      static {
+        org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.getDescriptor();
+      }
+    }
+    
+    // optional string name = 1;
+    public static final int NAME_FIELD_NUMBER = 1;
+    private boolean hasName;
+    private java.lang.String name_ = "";
+    public boolean hasName() { return hasName; }
+    public java.lang.String getName() { return name_; }
+    
+    // repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute attrs = 2;
+    public static final int ATTRS_FIELD_NUMBER = 2;
+    private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> attrs_ =
+      java.util.Collections.emptyList();
+    public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
+      return attrs_;
+    }
+    public int getAttrsCount() { return attrs_.size(); }
+    public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
+      return attrs_.get(index);
+    }
+    
+    // optional int32 ttl = 3;
+    public static final int TTL_FIELD_NUMBER = 3;
+    private boolean hasTtl;
+    private int ttl_ = 0;
+    public boolean hasTtl() { return hasTtl; }
+    public int getTtl() { return ttl_; }
+    
+    // optional int32 maxVersions = 4;
+    public static final int MAXVERSIONS_FIELD_NUMBER = 4;
+    private boolean hasMaxVersions;
+    private int maxVersions_ = 0;
+    public boolean hasMaxVersions() { return hasMaxVersions; }
+    public int getMaxVersions() { return maxVersions_; }
+    
+    // optional string compression = 5;
+    public static final int COMPRESSION_FIELD_NUMBER = 5;
+    private boolean hasCompression;
+    private java.lang.String compression_ = "";
+    public boolean hasCompression() { return hasCompression; }
+    public java.lang.String getCompression() { return compression_; }
+    
+    @Override
+    public final boolean isInitialized() {
+      for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
+        if (!element.isInitialized()) return false;
+      }
+      return true;
+    }
+    
+    @Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (hasName()) {
+        output.writeString(1, getName());
+      }
+      for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
+        output.writeMessage(2, element);
+      }
+      if (hasTtl()) {
+        output.writeInt32(3, getTtl());
+      }
+      if (hasMaxVersions()) {
+        output.writeInt32(4, getMaxVersions());
+      }
+      if (hasCompression()) {
+        output.writeString(5, getCompression());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    @Override
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (hasName()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeStringSize(1, getName());
+      }
+      for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(2, element);
+      }
+      if (hasTtl()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(3, getTtl());
+      }
+      if (hasMaxVersions()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(4, getMaxVersions());
+      }
+      if (hasCompression()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeStringSize(5, getCompression());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeDelimitedFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return new Builder(); }
+    public Builder newBuilderForType() { return new Builder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema prototype) {
+      return new Builder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder> {
+      // Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder()
+      private Builder() {}
+      
+      org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
+      
+      @Override
+      protected org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema internalGetResult() {
+        return result;
+      }
+      
+      @Override
+      public Builder clear() {
+        result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
+        return this;
+      }
+      
+      @Override
+      public Builder clone() {
+        return new Builder().mergeFrom(result);
+      }
+      
+      @Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema build() {
+        if (result != null && !isInitialized()) {
+          throw new com.google.protobuf.UninitializedMessageException(
+            result);
+        }
+        return buildPartial();
+      }
+      
+      private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        if (!isInitialized()) {
+          throw new com.google.protobuf.UninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return buildPartial();
+      }
+      
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildPartial() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "build() has already been called on this Builder.");  }
+        if (result.attrs_ != java.util.Collections.EMPTY_LIST) {
+          result.attrs_ =
+            java.util.Collections.unmodifiableList(result.attrs_);
+        }
+        org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema returnMe = result;
+        result = null;
+        return returnMe;
+      }
+      
+      @Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema) {
+          return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema other) {
+        if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance()) return this;
+        if (other.hasName()) {
+          setName(other.getName());
+        }
+        if (!other.attrs_.isEmpty()) {
+          if (result.attrs_.isEmpty()) {
+            result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
+          }
+          result.attrs_.addAll(other.attrs_);
+        }
+        if (other.hasTtl()) {
+          setTtl(other.getTtl());
+        }
+        if (other.hasMaxVersions()) {
+          setMaxVersions(other.getMaxVersions());
+        }
+        if (other.hasCompression()) {
+          setCompression(other.getCompression());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      @Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return mergeFrom(input,
+          com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
+      }
+      
+      @Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              setName(input.readString());
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder();
+              input.readMessage(subBuilder, extensionRegistry);
+              addAttrs(subBuilder.buildPartial());
+              break;
+            }
+            case 24: {
+              setTtl(input.readInt32());
+              break;
+            }
+            case 32: {
+              setMaxVersions(input.readInt32());
+              break;
+            }
+            case 42: {
+              setCompression(input.readString());
+              break;
+            }
+          }
+        }
+      }
+      
+      
+      // optional string name = 1;
+      public boolean hasName() {
+        return result.hasName();
+      }
+      public java.lang.String getName() {
+        return result.getName();
+      }
+      public Builder setName(java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.hasName = true;
+        result.name_ = value;
+        return this;
+      }
+      public Builder clearName() {
+        result.hasName = false;
+        result.name_ = "";
+        return this;
+      }
+      
+      // repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute attrs = 2;
+      public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
+        return java.util.Collections.unmodifiableList(result.attrs_);
+      }
+      public int getAttrsCount() {
+        return result.getAttrsCount();
+      }
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
+        return result.getAttrs(index);
+      }
+      public Builder setAttrs(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.attrs_.set(index, value);
+        return this;
+      }
+      public Builder setAttrs(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
+        result.attrs_.set(index, builderForValue.build());
+        return this;
+      }
+      public Builder addAttrs(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        if (result.attrs_.isEmpty()) {
+          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
+        }
+        result.attrs_.add(value);
+        return this;
+      }
+      public Builder addAttrs(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
+        if (result.attrs_.isEmpty()) {
+          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
+        }
+        result.attrs_.add(builderForValue.build());
+        return this;
+      }
+      public Builder addAllAttrs(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> values) {
+        if (result.attrs_.isEmpty()) {
+          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
+        }
+        super.addAll(values, result.attrs_);
+        return this;
+      }
+      public Builder clearAttrs() {
+        result.attrs_ = java.util.Collections.emptyList();
+        return this;
+      }
+      
+      // optional int32 ttl = 3;
+      public boolean hasTtl() {
+        return result.hasTtl();
+      }
+      public int getTtl() {
+        return result.getTtl();
+      }
+      public Builder setTtl(int value) {
+        result.hasTtl = true;
+        result.ttl_ = value;
+        return this;
+      }
+      public Builder clearTtl() {
+        result.hasTtl = false;
+        result.ttl_ = 0;
+        return this;
+      }
+      
+      // optional int32 maxVersions = 4;
+      public boolean hasMaxVersions() {
+        return result.hasMaxVersions();
+      }
+      public int getMaxVersions() {
+        return result.getMaxVersions();
+      }
+      public Builder setMaxVersions(int value) {
+        result.hasMaxVersions = true;
+        result.maxVersions_ = value;
+        return this;
+      }
+      public Builder clearMaxVersions() {
+        result.hasMaxVersions = false;
+        result.maxVersions_ = 0;
+        return this;
+      }
+      
+      // optional string compression = 5;
+      public boolean hasCompression() {
+        return result.hasCompression();
+      }
+      public java.lang.String getCompression() {
+        return result.getCompression();
+      }
+      public Builder setCompression(java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.hasCompression = true;
+        result.compression_ = value;
+        return this;
+      }
+      public Builder clearCompression() {
+        result.hasCompression = false;
+        result.compression_ = "";
+        return this;
+      }
+    }
+    
+    static {
+      org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.getDescriptor();
+    }
+  }
+  
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
+  
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
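+    // descriptorData holds the serialized FileDescriptorProto for
+    // ColumnSchemaMessage.proto; it is parsed at class-load time by
+    // internalBuildGeneratedFileFrom() below to wire up the accessor tables.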
+    java.lang.String descriptorData =
+      "\n\031ColumnSchemaMessage.proto\0223org.apache." +
+      "hadoop.hbase.stargate.protobuf.generated" +
+      "\"\331\001\n\014ColumnSchema\022\014\n\004name\030\001 \001(\t\022Z\n\005attrs" +
+      "\030\002 \003(\0132K.org.apache.hadoop.hbase.stargat" +
+      "e.protobuf.generated.ColumnSchema.Attrib" +
+      "ute\022\013\n\003ttl\030\003 \001(\005\022\023\n\013maxVersions\030\004 \001(\005\022\023\n" +
+      "\013compression\030\005 \001(\t\032(\n\tAttribute\022\014\n\004name\030" +
+      "\001 \002(\t\022\r\n\005value\030\002 \002(\t";
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor,
+              new java.lang.String[] { "Name", "Attrs", "Ttl", "MaxVersions", "Compression", },
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.class,
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder.class);
+          internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor =
+            internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor.getNestedTypes().get(0);
+          internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor,
+              new java.lang.String[] { "Name", "Value", },
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.class,
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+}
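
For reference, a minimal sketch (not part of this commit) of how a client
might use the generated API above. The classes and builder methods are the
ones generated in this file; the family name "info", the TTL, and the
attribute name/value are illustrative only:

    import org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema;

    public class ColumnSchemaExample {
      public static void main(String[] args) throws Exception {
        // Chain the generated Builder setters; Attribute.name and
        // Attribute.value are required fields, everything else is optional.
        ColumnSchema schema = ColumnSchema.newBuilder()
            .setName("info")
            .setTtl(86400)
            .setMaxVersions(3)
            .setCompression("GZ")
            .addAttrs(ColumnSchema.Attribute.newBuilder()
                .setName("BLOCKCACHE")
                .setValue("true")
                .build())
            .build();

        // Round-trip through the wire format with the generated parseFrom().
        ColumnSchema copy = ColumnSchema.parseFrom(schema.toByteArray());
        System.out.println(copy.getName() + " attrs=" + copy.getAttrsCount());
      }
    }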

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ScannerMessage.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ScannerMessage.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ScannerMessage.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ScannerMessage.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,558 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+
+package org.apache.hadoop.hbase.stargate.protobuf.generated;
+
+public final class ScannerMessage {
+  private ScannerMessage() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public static final class Scanner extends
+      com.google.protobuf.GeneratedMessage {
+    // Use Scanner.newBuilder() to construct.
+    private Scanner() {}
+    
+    private static final Scanner defaultInstance = new Scanner();
+    public static Scanner getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public Scanner getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor;
+    }
+    
+    @Override
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable;
+    }
+    
+    // optional bytes startRow = 1;
+    public static final int STARTROW_FIELD_NUMBER = 1;
+    private boolean hasStartRow;
+    private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
+    public boolean hasStartRow() { return hasStartRow; }
+    public com.google.protobuf.ByteString getStartRow() { return startRow_; }
+    
+    // optional bytes endRow = 2;
+    public static final int ENDROW_FIELD_NUMBER = 2;
+    private boolean hasEndRow;
+    private com.google.protobuf.ByteString endRow_ = com.google.protobuf.ByteString.EMPTY;
+    public boolean hasEndRow() { return hasEndRow; }
+    public com.google.protobuf.ByteString getEndRow() { return endRow_; }
+    
+    // repeated bytes columns = 3;
+    public static final int COLUMNS_FIELD_NUMBER = 3;
+    private java.util.List<com.google.protobuf.ByteString> columns_ =
+      java.util.Collections.emptyList();
+    public java.util.List<com.google.protobuf.ByteString> getColumnsList() {
+      return columns_;
+    }
+    public int getColumnsCount() { return columns_.size(); }
+    public com.google.protobuf.ByteString getColumns(int index) {
+      return columns_.get(index);
+    }
+    
+    // optional int32 batch = 4;
+    public static final int BATCH_FIELD_NUMBER = 4;
+    private boolean hasBatch;
+    private int batch_ = 0;
+    public boolean hasBatch() { return hasBatch; }
+    public int getBatch() { return batch_; }
+    
+    // optional int64 startTime = 5;
+    public static final int STARTTIME_FIELD_NUMBER = 5;
+    private boolean hasStartTime;
+    private long startTime_ = 0L;
+    public boolean hasStartTime() { return hasStartTime; }
+    public long getStartTime() { return startTime_; }
+    
+    // optional int64 endTime = 6;
+    public static final int ENDTIME_FIELD_NUMBER = 6;
+    private boolean hasEndTime;
+    private long endTime_ = 0L;
+    public boolean hasEndTime() { return hasEndTime; }
+    public long getEndTime() { return endTime_; }
+    
+    @Override
+    public final boolean isInitialized() {
+      return true;
+    }
+    
+    @Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (hasStartRow()) {
+        output.writeBytes(1, getStartRow());
+      }
+      if (hasEndRow()) {
+        output.writeBytes(2, getEndRow());
+      }
+      for (com.google.protobuf.ByteString element : getColumnsList()) {
+        output.writeBytes(3, element);
+      }
+      if (hasBatch()) {
+        output.writeInt32(4, getBatch());
+      }
+      if (hasStartTime()) {
+        output.writeInt64(5, getStartTime());
+      }
+      if (hasEndTime()) {
+        output.writeInt64(6, getEndTime());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    @Override
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (hasStartRow()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getStartRow());
+      }
+      if (hasEndRow()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getEndRow());
+      }
+      {
+        int dataSize = 0;
+        for (com.google.protobuf.ByteString element : getColumnsList()) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(element);
+        }
+        size += dataSize;
+        size += 1 * getColumnsList().size();
+      }
+      if (hasBatch()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(4, getBatch());
+      }
+      if (hasStartTime()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(5, getStartTime());
+      }
+      if (hasEndTime()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(6, getEndTime());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeDelimitedFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return new Builder(); }
+    public Builder newBuilderForType() { return new Builder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner prototype) {
+      return new Builder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder> {
+      // Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.newBuilder()
+      private Builder() {}
+      
+      org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
+      
+      @Override
+      protected org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner internalGetResult() {
+        return result;
+      }
+      
+      @Override
+      public Builder clear() {
+        result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
+        return this;
+      }
+      
+      @Override
+      public Builder clone() {
+        return new Builder().mergeFrom(result);
+      }
+      
+      @Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner build() {
+        if (result != null && !isInitialized()) {
+          throw new com.google.protobuf.UninitializedMessageException(
+            result);
+        }
+        return buildPartial();
+      }
+      
+      private org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        if (!isInitialized()) {
+          throw new com.google.protobuf.UninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return buildPartial();
+      }
+      
+      public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner buildPartial() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "build() has already been called on this Builder.");  }
+        if (result.columns_ != java.util.Collections.EMPTY_LIST) {
+          result.columns_ =
+            java.util.Collections.unmodifiableList(result.columns_);
+        }
+        org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner returnMe = result;
+        result = null;
+        return returnMe;
+      }
+      
+      @Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner) {
+          return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner other) {
+        if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance()) return this;
+        if (other.hasStartRow()) {
+          setStartRow(other.getStartRow());
+        }
+        if (other.hasEndRow()) {
+          setEndRow(other.getEndRow());
+        }
+        if (!other.columns_.isEmpty()) {
+          if (result.columns_.isEmpty()) {
+            result.columns_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+          }
+          result.columns_.addAll(other.columns_);
+        }
+        if (other.hasBatch()) {
+          setBatch(other.getBatch());
+        }
+        if (other.hasStartTime()) {
+          setStartTime(other.getStartTime());
+        }
+        if (other.hasEndTime()) {
+          setEndTime(other.getEndTime());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      @Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return mergeFrom(input,
+          com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
+      }
+      
+      @Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
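+          // Tags here map to fields 1-6: 10/18/26 are length-delimited
+          // (bytes), while 32/40/48 are varints (int32/int64).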
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              setStartRow(input.readBytes());
+              break;
+            }
+            case 18: {
+              setEndRow(input.readBytes());
+              break;
+            }
+            case 26: {
+              addColumns(input.readBytes());
+              break;
+            }
+            case 32: {
+              setBatch(input.readInt32());
+              break;
+            }
+            case 40: {
+              setStartTime(input.readInt64());
+              break;
+            }
+            case 48: {
+              setEndTime(input.readInt64());
+              break;
+            }
+          }
+        }
+      }
+      
+      
+      // optional bytes startRow = 1;
+      public boolean hasStartRow() {
+        return result.hasStartRow();
+      }
+      public com.google.protobuf.ByteString getStartRow() {
+        return result.getStartRow();
+      }
+      public Builder setStartRow(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.hasStartRow = true;
+        result.startRow_ = value;
+        return this;
+      }
+      public Builder clearStartRow() {
+        result.hasStartRow = false;
+        result.startRow_ = com.google.protobuf.ByteString.EMPTY;
+        return this;
+      }
+      
+      // optional bytes endRow = 2;
+      public boolean hasEndRow() {
+        return result.hasEndRow();
+      }
+      public com.google.protobuf.ByteString getEndRow() {
+        return result.getEndRow();
+      }
+      public Builder setEndRow(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.hasEndRow = true;
+        result.endRow_ = value;
+        return this;
+      }
+      public Builder clearEndRow() {
+        result.hasEndRow = false;
+        result.endRow_ = com.google.protobuf.ByteString.EMPTY;
+        return this;
+      }
+      
+      // repeated bytes columns = 3;
+      public java.util.List<com.google.protobuf.ByteString> getColumnsList() {
+        return java.util.Collections.unmodifiableList(result.columns_);
+      }
+      public int getColumnsCount() {
+        return result.getColumnsCount();
+      }
+      public com.google.protobuf.ByteString getColumns(int index) {
+        return result.getColumns(index);
+      }
+      public Builder setColumns(int index, com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.columns_.set(index, value);
+        return this;
+      }
+      public Builder addColumns(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        if (result.columns_.isEmpty()) {
+          result.columns_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+        }
+        result.columns_.add(value);
+        return this;
+      }
+      public Builder addAllColumns(
+          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+        if (result.columns_.isEmpty()) {
+          result.columns_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+        }
+        super.addAll(values, result.columns_);
+        return this;
+      }
+      public Builder clearColumns() {
+        result.columns_ = java.util.Collections.emptyList();
+        return this;
+      }
+      
+      // optional int32 batch = 4;
+      public boolean hasBatch() {
+        return result.hasBatch();
+      }
+      public int getBatch() {
+        return result.getBatch();
+      }
+      public Builder setBatch(int value) {
+        result.hasBatch = true;
+        result.batch_ = value;
+        return this;
+      }
+      public Builder clearBatch() {
+        result.hasBatch = false;
+        result.batch_ = 0;
+        return this;
+      }
+      
+      // optional int64 startTime = 5;
+      public boolean hasStartTime() {
+        return result.hasStartTime();
+      }
+      public long getStartTime() {
+        return result.getStartTime();
+      }
+      public Builder setStartTime(long value) {
+        result.hasStartTime = true;
+        result.startTime_ = value;
+        return this;
+      }
+      public Builder clearStartTime() {
+        result.hasStartTime = false;
+        result.startTime_ = 0L;
+        return this;
+      }
+      
+      // optional int64 endTime = 6;
+      public boolean hasEndTime() {
+        return result.hasEndTime();
+      }
+      public long getEndTime() {
+        return result.getEndTime();
+      }
+      public Builder setEndTime(long value) {
+        result.hasEndTime = true;
+        result.endTime_ = value;
+        return this;
+      }
+      public Builder clearEndTime() {
+        result.hasEndTime = false;
+        result.endTime_ = 0L;
+        return this;
+      }
+    }
+    
+    static {
+      org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.getDescriptor();
+    }
+  }
+  
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable;
+  
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
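+    // descriptorData holds the serialized FileDescriptorProto for
+    // ScannerMessage.proto; it is parsed at class-load time by
+    // internalBuildGeneratedFileFrom() below to wire up the accessor tables.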
+    java.lang.String descriptorData =
+      "\n\024ScannerMessage.proto\0223org.apache.hadoo" +
+      "p.hbase.stargate.protobuf.generated\"o\n\007S" +
+      "canner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(\014" +
+      "\022\017\n\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tstar" +
+      "tTime\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003";
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor,
+              new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", },
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.class,
+              org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+}
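
For reference, a minimal sketch (not part of this commit) of building and
parsing a Scanner specification with the generated API above; the row keys,
column name, batch size, and time range are illustrative only:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner;

    public class ScannerExample {
      public static void main(String[] args) throws Exception {
        // All Scanner fields are optional, so any subset may be set.
        Scanner scanner = Scanner.newBuilder()
            .setStartRow(ByteString.copyFromUtf8("row-000"))
            .setEndRow(ByteString.copyFromUtf8("row-999"))
            .addColumns(ByteString.copyFromUtf8("info:name"))
            .setBatch(100)
            .setStartTime(0L)
            .setEndTime(Long.MAX_VALUE)
            .build();

        // Serialize and parse back using the generated methods.
        Scanner copy = Scanner.parseFrom(scanner.toByteArray());
        System.out.println("columns=" + copy.getColumnsCount()
            + " batch=" + copy.getBatch());
      }
    }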


