hbase-commits mailing list archives

From: ga...@apache.org
Subject: svn commit: r1399529 - in /hbase/trunk/hbase-server/src: main/java/org/apache/hadoop/hbase/coprocessor/ main/java/org/apache/hadoop/hbase/protobuf/ main/java/org/apache/hadoop/hbase/protobuf/generated/ main/protobuf/ test/java/org/apache/hadoop/hbase/c...
Date: Thu, 18 Oct 2012 06:38:34 GMT
Author: garyh
Date: Thu Oct 18 06:38:33 2012
New Revision: 1399529

URL: http://svn.apache.org/viewvc?rev=1399529&view=rev
Log:
HBASE-6786 Convert MultiRowMutationProtocol to protocol buffer service (Devaraj Das)

Added:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java
    hbase/trunk/hbase-server/src/main/protobuf/MultiRowMutation.proto
Removed:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java
Modified:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java

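For illustration, a minimal client-side sketch of the new protobuf-based invocation
path this commit introduces. It follows the usage example in the endpoint's Javadoc
below; the table name, row keys, column coordinates, and the wrapper class are
assumptions for the sketch, not part of the commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRowMutationClient {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable t = new HTable(conf, "t1");   // "t1" is a placeholder table name
    byte[] row1 = Bytes.toBytes("row1");
    byte[] row2 = Bytes.toBytes("row2");

    Put p1 = new Put(row1);
    p1.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v1"));
    Put p2 = new Put(row2);
    p2.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v2"));

    // Convert client-side Mutations to protobuf Mutates and batch them
    // into a single MultiMutateRequest.
    Mutate m1 = ProtobufUtil.toMutate(MutateType.PUT, p1);
    Mutate m2 = ProtobufUtil.toMutate(MutateType.PUT, p2);
    MultiMutateRequest mrm = MultiMutateRequest.newBuilder()
        .addMutatationRequest(m1)   // method name follows the generated .proto field
        .addMutatationRequest(m2)
        .build();

    // Open an RPC channel to the region hosting row1 and invoke the endpoint.
    // All rows must live in that region, or the endpoint rejects the call.
    CoprocessorRpcChannel channel = t.coprocessorService(row1);
    MultiRowMutationService.BlockingInterface service =
        MultiRowMutationService.newBlockingStub(channel);
    service.mutateRows(null, mrm);
    t.close();
  }
}
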
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java?rev=1399529&r1=1399528&r2=1399529&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java Thu Oct 18 06:38:33 2012
@@ -18,55 +18,134 @@
 package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.WrongRegionException;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
 
 /**
  * This class demonstrates how to implement atomic multi row transactions using
  * {@link HRegion#mutateRowsWithLocks(java.util.Collection, java.util.Collection)}
  * and Coprocessor endpoints.
+ *
+ * Defines a protocol to perform multi row transactions.
+ * This endpoint implements that protocol.
+ * <br/>
+ * See
+ * {@link HRegion#mutateRowsWithLocks(java.util.Collection, java.util.Collection)}
+ * for details and limitations.
+ * <br/>
+ * Example:
+ * <code><pre>
+ * List<Mutation> mutations = ...;
+ * Put p1 = new Put(row1);
+ * Put p2 = new Put(row2);
+ * ...
+ * Mutate m1 = ProtobufUtil.toMutate(MutateType.PUT, p1);
+ * Mutate m2 = ProtobufUtil.toMutate(MutateType.PUT, p2);
+ * MultiMutateRequest.Builder mrmBuilder = MultiMutateRequest.newBuilder();
+ * mrmBuilder.addMutatationRequest(m1);
+ * mrmBuilder.addMutatationRequest(m2);
+ * CoprocessorRpcChannel channel = t.coprocessorService(ROW);
+ * MultiRowMutationService.BlockingInterface service = 
+ *    MultiRowMutationService.newBlockingStub(channel);
+ * MultiMutateRequest mrm = mrmBuilder.build();
+ * service.mutateRows(null, mrm);
+ * </pre></code>
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class MultiRowMutationEndpoint extends BaseEndpointCoprocessor implements
-    MultiRowMutationProtocol {
-
+public class MultiRowMutationEndpoint extends MultiRowMutationService implements
+CoprocessorService, Coprocessor {
+  private RegionCoprocessorEnvironment env;
   @Override
-  public void mutateRows(List<Mutation> mutations) throws IOException {
-    // get the coprocessor environment
-    RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) getEnvironment();
-
-    // set of rows to lock, sorted to avoid deadlocks
-    SortedSet<byte[]> rowsToLock = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
-
-    HRegionInfo regionInfo = env.getRegion().getRegionInfo();
-    for (Mutation m : mutations) {
-      // check whether rows are in range for this region
-      if (!HRegion.rowIsInRange(regionInfo, m.getRow())) {
-        String msg = "Requested row out of range '"
-            + Bytes.toStringBinary(m.getRow()) + "'";
-        if (rowsToLock.isEmpty()) {
-          // if this is the first row, region might have moved,
-          // allow client to retry
-          throw new WrongRegionException(msg);
-        } else {
-          // rows are split between regions, do not retry
-          throw new DoNotRetryIOException(msg);
+  public void mutateRows(RpcController controller, MultiMutateRequest request, 
+      RpcCallback<MultiMutateResponse> done) {
+    MultiMutateResponse response = MultiMutateResponse.getDefaultInstance();
+    try {
+      // set of rows to lock, sorted to avoid deadlocks
+      SortedSet<byte[]> rowsToLock = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+      List<Mutate> mutateRequestList = request.getMutatationRequestList();
+      List<Mutation> mutations = new ArrayList<Mutation>(mutateRequestList.size());
+      for (Mutate m : mutateRequestList) {
+        mutations.add(ProtobufUtil.toMutation(m));
+      }
+
+      HRegionInfo regionInfo = env.getRegion().getRegionInfo();
+      for (Mutation m : mutations) {
+        // check whether rows are in range for this region
+        if (!HRegion.rowIsInRange(regionInfo, m.getRow())) {
+          String msg = "Requested row out of range '"
+              + Bytes.toStringBinary(m.getRow()) + "'";
+          if (rowsToLock.isEmpty()) {
+            // if this is the first row, region might have moved,
+            // allow client to retry
+            throw new WrongRegionException(msg);
+          } else {
+            // rows are split between regions, do not retry
+            throw new DoNotRetryIOException(msg);
+          }
         }
+        rowsToLock.add(m.getRow());
       }
-      rowsToLock.add(m.getRow());
+      // call utility method on region
+      env.getRegion().mutateRowsWithLocks(mutations, rowsToLock);
+    } catch (IOException e) {
+      ResponseConverter.setControllerException(controller, e);
     }
-    // call utility method on region
-    env.getRegion().mutateRowsWithLocks(mutations, rowsToLock);
+    done.run(response);
+  }
+
+
+  @Override
+  public Service getService() {
+    return this;
+  }
+
+  /**
+   * Stores a reference to the coprocessor environment provided by the
+   * {@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost} from the region where this
+   * coprocessor is loaded.  Since this is a coprocessor endpoint, it always expects to be loaded
+   * on a table region, so always expects this to be an instance of
+   * {@link RegionCoprocessorEnvironment}.
+   * @param env the environment provided by the coprocessor host
+   * @throws IOException if the provided environment is not an instance of
+   * {@code RegionCoprocessorEnvironment}
+   */
+  @Override
+  public void start(CoprocessorEnvironment env) throws IOException {
+    if (env instanceof RegionCoprocessorEnvironment) {
+      this.env = (RegionCoprocessorEnvironment)env;
+    } else {
+      throw new CoprocessorException("Must be loaded on a table region!");
+    }
+  }
+
+  @Override
+  public void stop(CoprocessorEnvironment env) throws IOException {
+    // nothing to do
   }
 }

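The endpoint above only answers once it is loaded on the target table's regions. A
hedged sketch of one way to deploy it at table-creation time; the table name, column
family, and use of HTableDescriptor.addCoprocessor are illustrative assumptions, not
something this commit changes (loading via hbase.coprocessor.region.classes in
hbase-site.xml works as well).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTableWithEndpoint {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);
    HTableDescriptor htd = new HTableDescriptor("t1"); // placeholder table name
    htd.addFamily(new HColumnDescriptor("f"));
    // Register the endpoint so every region of the table exposes
    // MultiRowMutationService to coprocessorService() callers.
    htd.addCoprocessor(
        "org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint");
    admin.createTable(htd);
    admin.close();
  }
}
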
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java?rev=1399529&r1=1399528&r2=1399529&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java Thu Oct 18 06:38:33 2012
@@ -531,6 +531,27 @@ public final class ProtobufUtil {
   }
 
   /**
+   * Convert a protocol buffer Mutate to a Mutation
+   *
+   * @param proto the protocol buffer Mutate to convert
+   * @return the converted Mutation
+   * @throws IOException if the mutate type is not understood
+   */
+  public static Mutation toMutation(final Mutate proto) throws IOException {
+    MutateType type = proto.getMutateType();
+    if (type == MutateType.APPEND) {
+      return toAppend(proto);
+    }
+    if (type == MutateType.DELETE) {
+      return toDelete(proto);
+    }
+    if (type == MutateType.PUT) {
+      return toPut(proto);
+    }
+    throw new IOException("Not an understood mutate type " + type);
+  }
+
+  /**
    * Convert a protocol buffer Mutate to an Increment
    *
    * @param proto the protocol buffer Mutate to convert

Added: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java?rev=1399529&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java (added)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java Thu Oct 18 06:38:33 2012
@@ -0,0 +1,1184 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: MultiRowMutation.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class MultiRowMutation {
+  private MultiRowMutation() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface MultiMutateRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // repeated .Mutate mutatationRequest = 1;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate> 
+        getMutatationRequestList();
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutatationRequest(int index);
+    int getMutatationRequestCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> 
+        getMutatationRequestOrBuilderList();
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutatationRequestOrBuilder(
+        int index);
+  }
+  public static final class MultiMutateRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements MultiMutateRequestOrBuilder {
+    // Use MultiMutateRequest.newBuilder() to construct.
+    private MultiMutateRequest(Builder builder) {
+      super(builder);
+    }
+    private MultiMutateRequest(boolean noInit) {}
+    
+    private static final MultiMutateRequest defaultInstance;
+    public static MultiMutateRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public MultiMutateRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable;
+    }
+    
+    // repeated .Mutate mutatationRequest = 1;
+    public static final int MUTATATIONREQUEST_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate> mutatationRequest_;
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate> getMutatationRequestList() {
+      return mutatationRequest_;
+    }
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> 
+        getMutatationRequestOrBuilderList() {
+      return mutatationRequest_;
+    }
+    public int getMutatationRequestCount() {
+      return mutatationRequest_.size();
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutatationRequest(int index) {
+      return mutatationRequest_.get(index);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutatationRequestOrBuilder(
+        int index) {
+      return mutatationRequest_.get(index);
+    }
+    
+    private void initFields() {
+      mutatationRequest_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      for (int i = 0; i < getMutatationRequestCount(); i++) {
+        if (!getMutatationRequest(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < mutatationRequest_.size(); i++) {
+        output.writeMessage(1, mutatationRequest_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      for (int i = 0; i < mutatationRequest_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, mutatationRequest_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) obj;
+      
+      boolean result = true;
+      result = result && getMutatationRequestList()
+          .equals(other.getMutatationRequestList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getMutatationRequestCount() > 0) {
+        hash = (37 * hash) + MUTATATIONREQUEST_FIELD_NUMBER;
+        hash = (53 * hash) + getMutatationRequestList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getMutatationRequestFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        if (mutatationRequestBuilder_ == null) {
+          mutatationRequest_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          mutatationRequestBuilder_.clear();
+        }
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this);
+        int from_bitField0_ = bitField0_;
+        if (mutatationRequestBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            mutatationRequest_ = java.util.Collections.unmodifiableList(mutatationRequest_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.mutatationRequest_ = mutatationRequest_;
+        } else {
+          result.mutatationRequest_ = mutatationRequestBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance()) return this;
+        if (mutatationRequestBuilder_ == null) {
+          if (!other.mutatationRequest_.isEmpty()) {
+            if (mutatationRequest_.isEmpty()) {
+              mutatationRequest_ = other.mutatationRequest_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureMutatationRequestIsMutable();
+              mutatationRequest_.addAll(other.mutatationRequest_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.mutatationRequest_.isEmpty()) {
+            if (mutatationRequestBuilder_.isEmpty()) {
+              mutatationRequestBuilder_.dispose();
+              mutatationRequestBuilder_ = null;
+              mutatationRequest_ = other.mutatationRequest_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+              mutatationRequestBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getMutatationRequestFieldBuilder() : null;
+            } else {
+              mutatationRequestBuilder_.addAllMessages(other.mutatationRequest_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        for (int i = 0; i < getMutatationRequestCount(); i++) {
+          if (!getMutatationRequest(i).isInitialized()) {
+            
+            return false;
+          }
+        }
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder();
+              input.readMessage(subBuilder, extensionRegistry);
+              addMutatationRequest(subBuilder.buildPartial());
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // repeated .Mutate mutatationRequest = 1;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate> mutatationRequest_ =
+        java.util.Collections.emptyList();
+      private void ensureMutatationRequestIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          mutatationRequest_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate>(mutatationRequest_);
+          bitField0_ |= 0x00000001;
+         }
+      }
+      
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutatationRequestBuilder_;
+      
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate> getMutatationRequestList() {
+        if (mutatationRequestBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(mutatationRequest_);
+        } else {
+          return mutatationRequestBuilder_.getMessageList();
+        }
+      }
+      public int getMutatationRequestCount() {
+        if (mutatationRequestBuilder_ == null) {
+          return mutatationRequest_.size();
+        } else {
+          return mutatationRequestBuilder_.getCount();
+        }
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutatationRequest(int index) {
+        if (mutatationRequestBuilder_ == null) {
+          return mutatationRequest_.get(index);
+        } else {
+          return mutatationRequestBuilder_.getMessage(index);
+        }
+      }
+      public Builder setMutatationRequest(
+          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) {
+        if (mutatationRequestBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.set(index, value);
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      public Builder setMutatationRequest(
+          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) {
+        if (mutatationRequestBuilder_ == null) {
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      public Builder addMutatationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) {
+        if (mutatationRequestBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.add(value);
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      public Builder addMutatationRequest(
+          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) {
+        if (mutatationRequestBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.add(index, value);
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      public Builder addMutatationRequest(
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) {
+        if (mutatationRequestBuilder_ == null) {
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.add(builderForValue.build());
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      public Builder addMutatationRequest(
+          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) {
+        if (mutatationRequestBuilder_ == null) {
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      public Builder addAllMutatationRequest(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate> values) {
+        if (mutatationRequestBuilder_ == null) {
+          ensureMutatationRequestIsMutable();
+          super.addAll(values, mutatationRequest_);
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      public Builder clearMutatationRequest() {
+        if (mutatationRequestBuilder_ == null) {
+          mutatationRequest_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.clear();
+        }
+        return this;
+      }
+      public Builder removeMutatationRequest(int index) {
+        if (mutatationRequestBuilder_ == null) {
+          ensureMutatationRequestIsMutable();
+          mutatationRequest_.remove(index);
+          onChanged();
+        } else {
+          mutatationRequestBuilder_.remove(index);
+        }
+        return this;
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutatationRequestBuilder(
+          int index) {
+        return getMutatationRequestFieldBuilder().getBuilder(index);
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutatationRequestOrBuilder(
+          int index) {
+        if (mutatationRequestBuilder_ == null) {
+          return mutatationRequest_.get(index);  } else {
+          return mutatationRequestBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> 
+           getMutatationRequestOrBuilderList() {
+        if (mutatationRequestBuilder_ != null) {
+          return mutatationRequestBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(mutatationRequest_);
+        }
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder addMutatationRequestBuilder() {
+        return getMutatationRequestFieldBuilder().addBuilder(
+            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance());
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder addMutatationRequestBuilder(
+          int index) {
+        return getMutatationRequestFieldBuilder().addBuilder(
+            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance());
+      }
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder> 
+           getMutatationRequestBuilderList() {
+        return getMutatationRequestFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> 
+          getMutatationRequestFieldBuilder() {
+        if (mutatationRequestBuilder_ == null) {
+          mutatationRequestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>(
+                  mutatationRequest_,
+                  ((bitField0_ & 0x00000001) == 0x00000001),
+                  getParentForChildren(),
+                  isClean());
+          mutatationRequest_ = null;
+        }
+        return mutatationRequestBuilder_;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:MultiMutateRequest)
+    }
+    
+    static {
+      defaultInstance = new MultiMutateRequest(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:MultiMutateRequest)
+  }
+  
+  public interface MultiMutateResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  public static final class MultiMutateResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements MultiMutateResponseOrBuilder {
+    // Use MultiMutateResponse.newBuilder() to construct.
+    private MultiMutateResponse(Builder builder) {
+      super(builder);
+    }
+    private MultiMutateResponse(boolean noInit) {}
+    
+    private static final MultiMutateResponse defaultInstance;
+    public static MultiMutateResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public MultiMutateResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable;
+    }
+    
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) obj;
+      
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this);
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+          }
+        }
+      }
+      
+      
+      // @@protoc_insertion_point(builder_scope:MultiMutateResponse)
+    }
+    
+    static {
+      defaultInstance = new MultiMutateResponse(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:MultiMutateResponse)
+  }
+  
+  public static abstract class MultiRowMutationService
+      implements com.google.protobuf.Service {
+    protected MultiRowMutationService() {}
+    
+    public interface Interface {
+      public abstract void mutateRows(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse> done);
+      
+    }
+    
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new MultiRowMutationService() {
+        @java.lang.Override
+        public  void mutateRows(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse> done) {
+          impl.mutateRows(controller, request, done);
+        }
+        
+      };
+    }
+    
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+        
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+      };
+    }
+    
+    public abstract void mutateRows(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse> done);
+    
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+    
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+    
+    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+      
+      private final com.google.protobuf.RpcChannel channel;
+      
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+      
+      public  void mutateRows(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()));
+      }
+    }
+    
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+    
+    public interface BlockingInterface {
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request)
+          throws com.google.protobuf.ServiceException;
+    }
+    
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+      
+      private final com.google.protobuf.BlockingRpcChannel channel;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance());
+      }
+      
+    }
+  }
+  
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_MultiMutateRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_MultiMutateRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_MultiMutateResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_MultiMutateResponse_fieldAccessorTable;
+  
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\026MultiRowMutation.proto\032\014Client.proto\"8" +
+      "\n\022MultiMutateRequest\022\"\n\021mutatationReques" +
+      "t\030\001 \003(\0132\007.Mutate\"\025\n\023MultiMutateResponse2" +
+      "R\n\027MultiRowMutationService\0227\n\nmutateRows" +
+      "\022\023.MultiMutateRequest\032\024.MultiMutateRespo" +
+      "nseBF\n*org.apache.hadoop.hbase.protobuf." +
+      "generatedB\020MultiRowMutationH\001\210\001\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_MultiMutateRequest_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_MultiMutateRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_MultiMutateRequest_descriptor,
+              new java.lang.String[] { "MutatationRequest", },
+              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class,
+              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
+          internal_static_MultiMutateResponse_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_MultiMutateResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_MultiMutateResponse_descriptor,
+              new java.lang.String[] { },
+              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class,
+              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
+        }, assigner);
+  }
+  
+  // @@protoc_insertion_point(outer_class_scope)
+}

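The generated class above packages three client-facing pieces: an Interface for callback-style implementations, an asynchronous Stub, and a BlockingStub behind BlockingInterface. A hypothetical, self-contained helper sketching the blocking path end-to-end (the class and method names here are illustrative only; the individual calls mirror the TestFromClientSide changes further below):

import java.io.IOException;

import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;

import com.google.protobuf.ServiceException;

public class MultiRowMutationClient {
  /**
   * Atomically applies the given Puts, all of which must target rows in
   * the single region that hosts the first Put's row.
   */
  public static void mutateRowsAtomically(HTable table, Put... puts)
      throws IOException, ServiceException {
    MultiMutateRequest.Builder builder = MultiMutateRequest.newBuilder();
    for (Put put : puts) {
      // Wrap each client-side Put in the protobuf Mutate message.
      builder.addMutatationRequest(ProtobufUtil.toMutate(MutateType.PUT, put));
    }
    // RPC channel scoped to the region containing the first row.
    CoprocessorRpcChannel channel = table.coprocessorService(puts[0].getRow());
    MultiRowMutationService.BlockingInterface service =
        MultiRowMutationService.newBlockingStub(channel);
    service.mutateRows(null, builder.build());
  }
}
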
Added: hbase/trunk/hbase-server/src/main/protobuf/MultiRowMutation.proto
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/protobuf/MultiRowMutation.proto?rev=1399529&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/protobuf/MultiRowMutation.proto (added)
+++ hbase/trunk/hbase-server/src/main/protobuf/MultiRowMutation.proto Thu Oct 18 06:38:33 2012
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import "Client.proto";
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "MultiRowMutation";
+option java_generate_equals_and_hash = true;
+option java_generic_services = true;
+option optimize_for = SPEED;
+
+message MultiMutateRequest {
+  repeated Mutate mutatationRequest = 1;
+}
+
+message MultiMutateResponse {
+}
+
+service MultiRowMutationService {
+  rpc mutateRows(MultiMutateRequest) 
+      returns(MultiMutateResponse);
+}

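Because the definition sets java_generic_services = true, protoc emits the abstract service plus the Stub and BlockingStub seen in the generated file above. Note that the repeated field name mutatationRequest is spelled as committed, so the generated accessors (addMutatationRequest and friends) carry the same spelling. The test change below drives the blocking stub; a hypothetical sketch of the non-blocking form, reusing the t, ROW and mrm names from that test, with imports for com.google.protobuf.RpcCallback and MultiMutateResponse assumed, and assuming the CoprocessorRpcChannel returned by HTable.coprocessorService(row) also satisfies com.google.protobuf.RpcChannel (which newStub requires):

// Non-blocking variant: the callback runs once the endpoint's
// mutateRows completes on the region server.
CoprocessorRpcChannel channel = t.coprocessorService(ROW);
MultiRowMutationService service = MultiRowMutationService.newStub(channel);
service.mutateRows(null, mrm, new RpcCallback<MultiMutateResponse>() {
  public void run(MultiMutateResponse response) {
    // MultiMutateResponse declares no fields; its arrival simply
    // signals that every mutation in the request was applied.
  }
});
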
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java?rev=1399529&r1=1399528&r2=1399529&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java Thu Oct 18 06:38:33 2012
@@ -46,10 +46,10 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
-import org.apache.hadoop.hbase.coprocessor.MultiRowMutationProtocol;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -64,6 +64,12 @@ import org.apache.hadoop.hbase.filter.Si
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
@@ -4175,16 +4181,22 @@ public class TestFromClientSide {
     final byte [] ROW1 = Bytes.toBytes("testRow1");
 
     HTable t = TEST_UTIL.createTable(TABLENAME, FAMILY);
-    List<Mutation> mrm = new ArrayList<Mutation>();
     Put p = new Put(ROW);
     p.add(FAMILY, QUALIFIER, VALUE);
-    mrm.add(p);
+    Mutate m1 = ProtobufUtil.toMutate(MutateType.PUT, p);
+
     p = new Put(ROW1);
     p.add(FAMILY, QUALIFIER, VALUE);
-    mrm.add(p);
-    MultiRowMutationProtocol mr = t.coprocessorProxy(
-        MultiRowMutationProtocol.class, ROW);
-    mr.mutateRows(mrm);
+    Mutate m2 = ProtobufUtil.toMutate(MutateType.PUT, p);
+
+    MultiMutateRequest.Builder mrmBuilder = MultiMutateRequest.newBuilder();
+    mrmBuilder.addMutatationRequest(m1);
+    mrmBuilder.addMutatationRequest(m2);
+    MultiMutateRequest mrm = mrmBuilder.build();
+    CoprocessorRpcChannel channel = t.coprocessorService(ROW);
+    MultiRowMutationService.BlockingInterface service = 
+       MultiRowMutationService.newBlockingStub(channel);
+    service.mutateRows(null, mrm);
     Get g = new Get(ROW);
     Result r = t.get(g);
     assertEquals(0, Bytes.compareTo(VALUE, r.getValue(FAMILY, QUALIFIER)));

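One design note on the test change above: the removed dynamic-proxy call (t.coprocessorProxy(MultiRowMutationProtocol.class, ROW) followed by mr.mutateRows(mrm)) becomes a protobuf stub bound to a CoprocessorRpcChannel for the region containing ROW, so the usual constraint still applies: every row named in the request must live in that one region. Failures now reach the caller as com.google.protobuf.ServiceException, per the BlockingInterface signature in the generated code above.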

