From: apurtell@apache.org
To: commits@hbase.apache.org
Date: Fri, 07 Aug 2015 16:13:58 -0000
Subject: [2/6] hbase git commit: HBASE-13825 Use ProtobufUtil#mergeFrom and
 ProtobufUtil#mergeDelimitedFrom in place of builder methods of same name

HBASE-13825 Use ProtobufUtil#mergeFrom and ProtobufUtil#mergeDelimitedFrom
in place of builder methods of same name

Incorporates HBASE-14076

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b194052e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b194052e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b194052e

Branch: refs/heads/branch-1
Commit: b194052ec09f8519660b472fc447c20eacdf1bbd
Parents: 51061f0
Author: Andrew Purtell <apurtell@apache.org>
Authored: Thu Aug 6 21:48:04 2015 -0700
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Thu Aug 6 21:48:04 2015 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hbase/ClusterId.java |   7 +-
 .../apache/hadoop/hbase/HColumnDescriptor.java  |   6 +-
 .../org/apache/hadoop/hbase/HRegionInfo.java    |  10 +-
 .../apache/hadoop/hbase/HTableDescriptor.java   |   7 +-
 .../apache/hadoop/hbase/RegionTransition.java   |  12 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |   9 +-
 .../hbase/ipc/AsyncServerResponseHandler.java   |   3 +-
 .../hbase/ipc/MasterCoprocessorRpcChannel.java  |   5 +-
 .../hbase/ipc/RegionCoprocessorRpcChannel.java  |   5 +-
 .../ipc/RegionServerCoprocessorRpcChannel.java  |   5 +-
 .../apache/hadoop/hbase/ipc/RpcClientImpl.java  |   2 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java     | 111 +++++++++++++++++-
 .../replication/ReplicationPeerZKImpl.java      |   7 +-
 .../replication/ReplicationPeersZKImpl.java     |   6 +-
 .../token/AuthenticationTokenIdentifier.java    |   7 +-
 .../zookeeper/ZKTableStateClientSideReader.java |   9 +-
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   |  32 ++++--
 .../org/apache/hadoop/hbase/types/PBCell.java   |   2 +
 .../hadoop/hbase/rest/model/CellModel.java      |   3 +-
 .../hadoop/hbase/rest/model/CellSetModel.java   |   3 +-
 .../hadoop/hbase/rest/model/ScannerModel.java   |   3 +-
 .../rest/model/StorageClusterStatusModel.java   |   3 +-
 .../hadoop/hbase/rest/model/TableInfoModel.java |   3 +-
 .../hadoop/hbase/rest/model/TableListModel.java |   3 +-
 .../hbase/rest/model/TableSchemaModel.java      |   3 +-
 .../hadoop/hbase/rest/model/VersionModel.java   |   3 +-
 .../org/apache/hadoop/hbase/SplitLogTask.java   |  12 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  |  10 +-
 .../hbase/mapreduce/MutationSerialization.java  |   6 +-
 .../hbase/mapreduce/ResultSerialization.java    |   4 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |   5 +-
 .../hadoop/hbase/master/TableLockManager.java   |  10 +-
 .../hadoop/hbase/regionserver/HRegion.java      |   5 +-
 .../hbase/regionserver/HRegionServer.java       |   6 +-
 .../regionserver/wal/ProtobufLogReader.java     |  11 +-
 .../security/access/AccessControlLists.java     |  21 ++--
 .../hbase/security/access/AccessController.java |  10 +-
 .../HbaseObjectWritableFor96Migration.java      |   3 +-
 .../security/visibility/VisibilityUtils.java    |  18 ++-
 .../org/apache/hadoop/hbase/util/FSUtils.java   |   8 +-
 .../hbase/zookeeper/LoadBalancerTracker.java    |   8 +-
 .../hbase/zookeeper/RegionServerTracker.java    |   2 +-
 .../hbase/zookeeper/ZKTableStateManager.java    |   9 +-
 .../hbase/mapreduce/TestSerialization.java      | 115 +++++++++++++++++++
 44 files changed, 390 insertions(+), 142 deletions(-)
----------------------------------------------------------------------
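Every hunk below applies the same mechanical rewrite: a chained protobuf
Builder#mergeFrom(...).build() becomes a call to a ProtobufUtil wrapper plus an
explicit build(), and the catch widens from InvalidProtocolBufferException to
IOException, because the wrappers run the merge through a CodedInputStream
whose size limit is set explicitly instead of being left at protobuf's 64MB
default. A minimal before/after sketch of the pattern; ExampleProtos.Example is
a hypothetical generated message standing in for the real ones touched below:

    // Before: the builder's own mergeFrom enforces the default 64MB decode
    // limit and throws InvalidProtocolBufferException on failure.
    ExampleProtos.Example msg =
        ExampleProtos.Example.newBuilder().mergeFrom(bytes, offset, len).build();

    // After: ProtobufUtil sizes the CodedInputStream to the actual payload
    // and surfaces failures as a plain IOException.
    ExampleProtos.Example.Builder builder = ExampleProtos.Example.newBuilder();
    ProtobufUtil.mergeFrom(builder, bytes, offset, len);
    ExampleProtos.Example msg = builder.build();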
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
index 5be224c..9056d3e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
@@ -18,13 +18,13 @@
 package org.apache.hadoop.hbase;

-import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;
 import org.apache.hadoop.hbase.util.Bytes;

+import java.io.IOException;
 import java.util.UUID;

 /**
@@ -66,8 +66,9 @@ public class ClusterId {
     ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
     ClusterIdProtos.ClusterId cid = null;
     try {
-      cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      cid = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(cid);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index ae4656f..542d928 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.io.WritableComparable;

 import com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.InvalidProtocolBufferException;

 /**
  * An HColumnDescriptor contains information about a column family such as the
@@ -1420,8 +1419,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
     ColumnFamilySchema cfs = null;
     try {
-      cfs = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      cfs = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(cfs);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index 31bb016..80b7c42 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -48,8 +48,6 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.PairOfSameType;
 import org.apache.hadoop.io.DataInputBuffer;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Information about a region. A region is a range of keys in the whole keyspace of a table, an
  * identifier (a timestamp) for differentiating between subset ranges (after region split)
@@ -1118,11 +1116,11 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
     if (ProtobufUtil.isPBMagicPrefix(bytes, offset, len)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        HBaseProtos.RegionInfo ri =
-          HBaseProtos.RegionInfo.newBuilder().
-            mergeFrom(bytes, pblen + offset, len - pblen).build();
+        HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
+        ProtobufUtil.mergeFrom(builder, bytes, pblen + offset, len - pblen);
+        HBaseProtos.RegionInfo ri = builder.build();
         return convert(ri);
-      } catch (InvalidProtocolBufferException e) {
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index a6d443c..92566e3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -56,8 +56,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.WritableComparable;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * HTableDescriptor contains the details about an HBase table such as the descriptors of
  * all the column families, is the table a catalog table, -ROOT- or
@@ -1585,8 +1583,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     TableSchema.Builder builder = TableSchema.newBuilder();
     TableSchema ts;
     try {
-      ts = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      ts = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(ts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
index c74e11f..f367718 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hbase;

+import java.io.IOException;
+
 import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -121,10 +122,11 @@ public class RegionTransition {
     ProtobufUtil.expectPBMagicPrefix(data);
     try {
       int prefixLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.RegionTransition rt = ZooKeeperProtos.RegionTransition.newBuilder().
-        mergeFrom(data, prefixLen, data.length - prefixLen).build();
-      return new RegionTransition(rt);
-    } catch (InvalidProtocolBufferException e) {
+      ZooKeeperProtos.RegionTransition.Builder builder =
+        ZooKeeperProtos.RegionTransition.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
+      return new RegionTransition(builder.build());
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 4361be7..22aee5a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -75,7 +75,6 @@ import org.apache.hadoop.hbase.util.Threads;

 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.Descriptors;
-import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
@@ -1884,10 +1883,10 @@ public class HTable implements HTableInterface, RegionLocator {
             ", value=" + serviceResult.getValue().getValue());
       }
       try {
-        callback.update(region, row,
-            (R) responsePrototype.newBuilderForType().mergeFrom(
-                serviceResult.getValue().getValue()).build());
-      } catch (InvalidProtocolBufferException e) {
+        Message.Builder builder = responsePrototype.newBuilderForType();
+        ProtobufUtil.mergeFrom(builder, serviceResult.getValue().getValue());
+        callback.update(region, row, (R) builder.build());
+      } catch (IOException e) {
         LOG.error("Unexpected response type from endpoint " + methodDescriptor.getFullName(),
             e);
         callbackErrorExceptions.add(e);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
index 1404e6f..f7aa8a9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
@@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.ipc.RemoteException;
@@ -90,7 +91,7 @@ public class AsyncServerResponseHandler extends ChannelInboundHandlerAdapter {
       // Call may be null because it may have timedout and been cleaned up on this side already
       if (call.responseDefaultType != null) {
         Message.Builder builder = call.responseDefaultType.newBuilderForType();
-        builder.mergeDelimitedFrom(in);
+        ProtobufUtil.mergeDelimitedFrom(builder, in);
         value = builder.build();
       }
       CellScanner cellBlockScanner = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
index 7446ea2..2d2edaf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
@@ -68,8 +68,9 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
     CoprocessorServiceResponse result = ProtobufUtil.execService(connection.getMaster(), call);
     Message response = null;
     if (result.getValue().hasValue()) {
-      response = responsePrototype.newBuilderForType()
-          .mergeFrom(result.getValue().getValue()).build();
+      Message.Builder builder = responsePrototype.newBuilderForType();
+      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+      response = builder.build();
     } else {
       response = responsePrototype.getDefaultInstanceForType();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
index e7e2d6e..009156e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
@@ -96,8 +96,9 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
         .callWithRetries(callable, operationTimeout);
     Message response = null;
     if (result.getValue().hasValue()) {
-      response = responsePrototype.newBuilderForType()
-          .mergeFrom(result.getValue().getValue()).build();
+      Message.Builder builder = responsePrototype.newBuilderForType();
+      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+      response = builder.build();
     } else {
       response = responsePrototype.getDefaultInstanceForType();
     }
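All three coprocessor RPC channels now share the same decode shape. A minimal
sketch of that shape, assuming a responsePrototype Message and a ByteString
payload as in the hunks above (decodeResponse is a hypothetical name):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.Message;

    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;

    // Decode a coprocessor service response against its prototype, falling
    // back to the default instance when the server returned no value.
    static Message decodeResponse(Message responsePrototype, ByteString value)
        throws java.io.IOException {
      if (value == null) {
        return responsePrototype.getDefaultInstanceForType();
      }
      // newBuilderForType() yields a builder for the concrete response type;
      // ProtobufUtil.mergeFrom sizes the CodedInputStream to value.size().
      Message.Builder builder = responsePrototype.newBuilderForType();
      ProtobufUtil.mergeFrom(builder, value);
      return builder.build();
    }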
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
index f0ba971..01cdd28 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
@@ -62,8 +62,9 @@ public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
         ProtobufUtil.execRegionServerService(connection.getClient(serverName), call);
     Message response = null;
     if (result.getValue().hasValue()) {
-      response =
-          responsePrototype.newBuilderForType().mergeFrom(result.getValue().getValue()).build();
+      Message.Builder builder = responsePrototype.newBuilderForType();
+      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+      response = builder.build();
     } else {
       response = responsePrototype.getDefaultInstanceForType();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
index fcfd620..fd3bab0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
@@ -959,7 +959,7 @@ public class RpcClientImpl extends AbstractRpcClient {
       Message value = null;
       if (call.responseDefaultType != null) {
         Builder builder = call.responseDefaultType.newBuilderForType();
-        builder.mergeDelimitedFrom(in);
+        ProtobufUtil.mergeDelimitedFrom(builder, in);
         value = builder.build();
       }
       CellScanner cellBlockScanner = null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index dc36bac..313c7fe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpeci

 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -67,6 +68,7 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -155,6 +157,7 @@ import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
 import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
 import com.google.protobuf.Parser;
@@ -2765,8 +2768,9 @@ public final class ProtobufUtil {
     ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder();
     ClientProtos.CellVisibility proto = null;
     try {
-      proto = builder.mergeFrom(protoBytes).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, protoBytes);
+      proto = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return toCellVisibility(proto);
@@ -2807,8 +2811,9 @@ public final class ProtobufUtil {
     ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder();
     ClientProtos.Authorizations proto = null;
     try {
-      proto = builder.mergeFrom(protoBytes).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, protoBytes);
+      proto = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return toAuthorizations(proto);
@@ -3070,6 +3075,104 @@ public final class ProtobufUtil {
     return desc.build();
   }

+  /**
+   * This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for decoding
+   * buffers
+   * @param builder current message builder
+   * @param in InputStream with delimited protobuf data
+   * @throws IOException
+   */
+  public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
+      throws IOException {
+    // This used to be builder.mergeDelimitedFrom(in);
+    // but is replaced to allow us to bump the protobuf size limit.
+    final int firstByte = in.read();
+    if (firstByte != -1) {
+      final int size = CodedInputStream.readRawVarint32(firstByte, in);
+      final InputStream limitedInput = new LimitInputStream(in, size);
+      final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
+      codedInput.setSizeLimit(size);
+      builder.mergeFrom(codedInput);
+      codedInput.checkLastTagWas(0);
+    }
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers where the message size is known
+   * @param builder current message builder
+   * @param in InputStream containing protobuf data
+   * @param size known size of protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, InputStream in, int size)
+      throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(in);
+    codedInput.setSizeLimit(size);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers where the message size is not known
+   * @param builder current message builder
+   * @param in InputStream containing protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, InputStream in)
+      throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(in);
+    codedInput.setSizeLimit(Integer.MAX_VALUE);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers when working with ByteStrings
+   * @param builder current message builder
+   * @param bs ByteString containing the protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
+    final CodedInputStream codedInput = bs.newCodedInput();
+    codedInput.setSizeLimit(bs.size());
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers when working with byte arrays
+   * @param builder current message builder
+   * @param b byte array
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(b);
+    codedInput.setSizeLimit(b.length);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers when working with byte arrays
+   * @param builder current message builder
+   * @param b byte array
+   * @param offset offset of the protobuf data in the byte array
+   * @param length length of the protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length)
+      throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(b, offset, length);
+    codedInput.setSizeLimit(length);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
   public static ReplicationLoadSink toReplicationLoadSink(
       ClusterStatusProtos.ReplicationLoadSink cls) {
     return new ReplicationLoadSink(cls.getAgeOfLastAppliedOp(), cls.getTimeStampsOfLastAppliedOp());
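For callers, the important difference is that these wrappers accept any
Message.Builder and rethrow decode failures as IOException. A hedged usage
sketch of the two stream-facing variants; ClusterIdProtos is a generated
message from this diff, while readSized and readDelimited are hypothetical
helper names:

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;

    // Size known up front (e.g. a length-prefixed record): bound the decoder
    // to exactly that many bytes instead of protobuf's 64MB default.
    static ClusterIdProtos.ClusterId readSized(InputStream in, int size)
        throws IOException {
      ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
      ProtobufUtil.mergeFrom(builder, in, size);
      return builder.build();
    }

    // Delimited framing (varint length, then payload), matching what
    // Message#writeDelimitedTo writes on the other end of the stream.
    static ClusterIdProtos.ClusterId readDelimited(InputStream in)
        throws IOException {
      ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
      ProtobufUtil.mergeDelimitedFrom(builder, in);
      return builder.build();
    }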
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
index d20ab44..e1ff078 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NodeExistsException;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 @InterfaceAudience.Private
 public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closeable {
   private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class);
@@ -199,9 +197,10 @@ public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closea
       ZooKeeperProtos.ReplicationState.newBuilder();
     ZooKeeperProtos.ReplicationState state;
     try {
-      state = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      state = builder.build();
       return state.getState();
-    } catch (InvalidProtocolBufferException e) {
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index de6f79e..aa2348e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
 import org.apache.zookeeper.KeeperException;

 import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;

 /**
  * This class provides an implementation of the ReplicationPeers interface using Zookeeper. The
@@ -492,8 +491,9 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
       ZooKeeperProtos.ReplicationPeer.newBuilder();
     ZooKeeperProtos.ReplicationPeer peer;
     try {
-      peer = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      peer = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(peer);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 0fb6969..4299003 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.token;

 import com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -136,8 +137,10 @@ public class AuthenticationTokenIdentifier extends TokenIdentifier {
     int len = in.readInt();
     byte[] inBytes = new byte[len];
     in.readFully(inBytes);
-    AuthenticationProtos.TokenIdentifier identifier =
-        AuthenticationProtos.TokenIdentifier.newBuilder().mergeFrom(inBytes).build();
+    AuthenticationProtos.TokenIdentifier.Builder builder =
+        AuthenticationProtos.TokenIdentifier.newBuilder();
+    ProtobufUtil.mergeFrom(builder, inBytes);
+    AuthenticationProtos.TokenIdentifier identifier = builder.build();
     // sanity check on type
     if (!identifier.hasKind() ||
         identifier.getKind() != AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
index d12231b..03579ff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
@@ -19,8 +19,6 @@
  */
 package org.apache.hadoop.hbase.zookeeper;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -28,6 +26,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.zookeeper.KeeperException;

+import java.io.IOException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -189,9 +188,9 @@ public class ZKTableStateClientSideReader {
       ProtobufUtil.expectPBMagicPrefix(data);
       ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
       int magicLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
-      return t.getState();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
+      return builder.getState();
+    } catch (IOException e) {
       KeeperException ke = new KeeperException.DataInconsistencyException();
       ke.initCause(e);
       throw ke;
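The ZooKeeper-facing readers all follow the same parse discipline: verify the
PB magic prefix, skip it, then merge the remainder. A minimal sketch of that
discipline, assuming a ZooKeeperProtos.Table payload as in the hunk above
(parseTableState is a hypothetical name):

    import java.io.IOException;

    import org.apache.hadoop.hbase.exceptions.DeserializationException;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;

    // Parse a znode payload expected to carry the PB magic prefix followed
    // by a serialized ZooKeeperProtos.Table message.
    static ZooKeeperProtos.Table.State parseTableState(byte[] data)
        throws DeserializationException {
      try {
        ProtobufUtil.expectPBMagicPrefix(data);         // throws if prefix is absent
        int magicLen = ProtobufUtil.lengthOfPBMagic();  // bytes to skip before the message
        ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
        ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
        return builder.getState();
      } catch (IOException e) {
        throw new DeserializationException(e);
      }
    }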
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 97cba5e..97b86a6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -1956,12 +1956,14 @@ public class ZKUtil {
       }
       // parse the data of the above peer znode.
       try {
-        String clusterKey = ZooKeeperProtos.ReplicationPeer.newBuilder().
-          mergeFrom(data, pblen, data.length - pblen).getClusterkey();
-        sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
-        // add the peer-state.
-        appendPeerState(zkw, znodeToProcess, sb);
-      } catch (InvalidProtocolBufferException ipbe) {
+        ZooKeeperProtos.ReplicationPeer.Builder builder =
+          ZooKeeperProtos.ReplicationPeer.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        String clusterKey = builder.getClusterkey();
+        sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
+        // add the peer-state.
+        appendPeerState(zkw, znodeToProcess, sb);
+      } catch (IOException ipbe) {
         LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
       }
     }
@@ -1979,8 +1981,12 @@ public class ZKUtil {
     byte[] peerStateData;
     try {
       peerStateData = ZKUtil.getData(zkw, peerStateZnode);
-      sb.append(ZooKeeperProtos.ReplicationState.newBuilder()
-        .mergeFrom(peerStateData, pblen, peerStateData.length - pblen).getState().name());
+      ZooKeeperProtos.ReplicationState.Builder builder =
+        ZooKeeperProtos.ReplicationState.newBuilder();
+      ProtobufUtil.mergeFrom(builder, peerStateData, pblen, peerStateData.length - pblen);
+      sb.append(builder.getState().name());
+    } catch (IOException ipbe) {
+      LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
     } catch (InterruptedException e) {
       zkw.interruptedException(e);
       return;
@@ -2201,8 +2207,9 @@ public class ZKUtil {
       ZooKeeperProtos.ReplicationHLogPosition.newBuilder();
     ZooKeeperProtos.ReplicationHLogPosition position;
     try {
-      position = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      position = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return position.getPosition();
@@ -2257,8 +2264,9 @@ public class ZKUtil {
     int pblen = ProtobufUtil.lengthOfPBMagic();
     RegionStoreSequenceIds storeIds = null;
     try {
-      storeIds = regionSequenceIdsBuilder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(regionSequenceIdsBuilder, bytes, pblen, bytes.length - pblen);
+      storeIds = regionSequenceIdsBuilder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return storeIds;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
index 96ecf28..c063aa9 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
@@ -37,6 +37,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
   public int skip(PositionedByteRange src) {
     CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
     CodedInputStream is = inputStreamFromByteRange(src);
+    is.setSizeLimit(src.getLength());
     try {
       builder.mergeFrom(is);
       int consumed = is.getTotalBytesRead();
@@ -51,6 +52,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
   public CellProtos.Cell decode(PositionedByteRange src) {
     CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
     CodedInputStream is = inputStreamFromByteRange(src);
+    is.setSizeLimit(src.getLength());
     try {
       CellProtos.Cell ret = builder.mergeFrom(is).build();
       src.setPosition(src.getPosition() + is.getTotalBytesRead());
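In PBCell the size limit is set directly on the CodedInputStream rather than
going through ProtobufUtil, because the DataType API decodes out of a
PositionedByteRange. A hedged sketch of the same idea; buildCodedInput is a
hypothetical helper standing in for inputStreamFromByteRange:

    import com.google.protobuf.CodedInputStream;

    import org.apache.hadoop.hbase.util.PositionedByteRange;

    // Wrap the unread slice of the range in a CodedInputStream and cap
    // decoding at the range's length, whatever size the message claims.
    static CodedInputStream buildCodedInput(PositionedByteRange src) {
      CodedInputStream is = CodedInputStream.newInstance(
          src.getBytes(), src.getOffset() + src.getPosition(), src.getRemaining());
      is.setSizeLimit(src.getLength());
      return is;
    }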
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
index 7c0c11f..214d574 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
 import org.codehaus.jackson.annotate.JsonProperty;
@@ -198,7 +199,7 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     Cell.Builder builder = Cell.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     setColumn(builder.getColumn().toByteArray());
     setValue(builder.getData().toByteArray());
     if (builder.hasTimestamp()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
index 094da36..3f76f24 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
@@ -32,6 +32,7 @@ import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
@@ -133,7 +134,7 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     CellSet.Builder builder = CellSet.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     for (CellSet.Row row: builder.getRowsList()) {
       RowModel rowModel = new RowModel(row.getKey().toByteArray());
       for (Cell cell: row.getValuesList()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
index fa13c8b..27f6b45 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.filter.SubstringComparator;
 import org.apache.hadoop.hbase.filter.TimestampsFilter;
 import org.apache.hadoop.hbase.filter.ValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
@@ -827,7 +828,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     Scanner.Builder builder = Scanner.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasStartRow()) {
       startRow = builder.getStartRow().toByteArray();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
index 32a1691..4827b6e 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
@@ -31,6 +31,7 @@ import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -748,7 +749,7 @@ public class StorageClusterStatusModel
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasRegions()) {
       regions = builder.getRegions();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
index 700e766..d796f8b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
@@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;
@@ -146,7 +147,7 @@ public class TableInfoModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     TableInfo.Builder builder = TableInfo.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     setName(builder.getName());
     for (TableInfo.Region region: builder.getRegionsList()) {
       add(new TableRegionModel(builder.getName(), region.getId(),
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
index 2ed4e80..37d5461 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
@@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlElementRef;
 import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;
@@ -104,7 +105,7 @@ public class TableListModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     TableList.Builder builder = TableList.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     for (String table: builder.getNameList()) {
       this.add(new TableModel(table));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
index d843e79..52e40b8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
@@ -309,7 +310,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     TableSchema.Builder builder = TableSchema.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     this.setName(builder.getName());
     for (TableSchema.Attribute attr: builder.getAttrsList()) {
       this.addAttribute(attr.getName(), attr.getValue());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
index 8b08279..ef131d3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
@@ -27,6 +27,7 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.RESTServlet;
 import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;
@@ -188,7 +189,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     Version.Builder builder = Version.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasRestVersion()) {
       restVersion = builder.getRestVersion();
     }
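Every REST model's getObjectFromMessage now funnels through the same wrapper.
A hedged round-trip sketch using the VersionModel touched above (roundTrip is
a hypothetical helper):

    import java.io.IOException;

    import org.apache.hadoop.hbase.rest.model.VersionModel;

    static VersionModel roundTrip(VersionModel model) throws IOException {
      // createProtobufOutput serializes the model; getObjectFromMessage now
      // decodes it via ProtobufUtil.mergeFrom instead of the raw builder call.
      byte[] bytes = model.createProtobufOutput();
      return (VersionModel) new VersionModel().getObjectFromMessage(bytes);
    }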
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
index abb0b0a..1feb417 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase;

+import java.io.IOException;
+
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -25,8 +27,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.util.Bytes;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * State of a WAL log split during distributed splitting. State is kept up in zookeeper.
  * Encapsulates protobuf serialization/deserialization so we don't leak generated pb outside of
@@ -160,10 +160,10 @@ public class SplitLogTask {
     ProtobufUtil.expectPBMagicPrefix(data);
     try {
       int prefixLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.SplitLogTask slt = ZooKeeperProtos.SplitLogTask.newBuilder().
-        mergeFrom(data, prefixLen, data.length - prefixLen).build();
-      return new SplitLogTask(slt);
-    } catch (InvalidProtocolBufferException e) {
+      ZooKeeperProtos.SplitLogTask.Builder builder = ZooKeeperProtos.SplitLogTask.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
+      return new SplitLogTask(builder.build());
+    } catch (IOException e) {
       throw new DeserializationException(Bytes.toStringBinary(data, 0, 64), e);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 1ab6ca7..b1aa66c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -131,7 +131,6 @@ import com.google.protobuf.BlockingService;
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.Descriptors.MethodDescriptor;
 import com.google.protobuf.Message;
-import com.google.protobuf.Message.Builder;
 import com.google.protobuf.ServiceException;
 import com.google.protobuf.TextFormat;
@@ -1778,7 +1777,9 @@ public class RpcServer implements RpcServerInterface {
       CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length);
       int headerSize = cis.readRawVarint32();
       offset = cis.getTotalBytesRead();
-      RequestHeader header = RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build();
+      Message.Builder builder = RequestHeader.newBuilder();
+      ProtobufUtil.mergeFrom(builder, buf, offset, headerSize);
+      RequestHeader header = (RequestHeader) builder.build();
       offset += headerSize;
       int id = header.getCallId();
       if (LOG.isTraceEnabled()) {
@@ -1806,13 +1807,14 @@ public class RpcServer implements RpcServerInterface {
       if (header.hasRequestParam() && header.getRequestParam()) {
         md = this.service.getDescriptorForType().findMethodByName(header.getMethodName());
         if (md == null) throw new UnsupportedOperationException(header.getMethodName());
-        Builder builder = this.service.getRequestPrototype(md).newBuilderForType();
+        builder = this.service.getRequestPrototype(md).newBuilderForType();
         // To read the varint, I need an inputstream; might as well be a CIS.
         cis = CodedInputStream.newInstance(buf, offset, buf.length);
         int paramSize = cis.readRawVarint32();
         offset += cis.getTotalBytesRead();
         if (builder != null) {
-          param = builder.mergeFrom(buf, offset, paramSize).build();
+          ProtobufUtil.mergeFrom(builder, buf, offset, paramSize);
+          param = builder.build();
         }
         offset += paramSize;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
index b15b513..4d200e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.Serialization;
@@ -57,7 +57,9 @@ public class MutationSerialization implements Serialization<Mutation> {

     @Override
     public Mutation deserialize(Mutation mutation) throws IOException {
-      MutationProto proto = MutationProto.parseDelimitedFrom(in);
+      ClientProtos.MutationProto.Builder builder = ClientProtos.MutationProto.newBuilder();
+      ProtobufUtil.mergeDelimitedFrom(builder, in);
+      ClientProtos.MutationProto proto = builder.build();
       return ProtobufUtil.toMutation(proto);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
index ebd3664..19b12c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
@@ -125,7 +125,9 @@ public class ResultSerialization extends Configured implements Serialization<Result>
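The MapReduce serializations read many delimited messages back-to-back from a
single stream, which is exactly the case the delimited wrapper targets. Note
that mergeDelimitedFrom returns nothing and leaves the builder untouched at
end-of-stream, so a caller that does not know the record count needs its own
end-of-stream signal. A hedged sketch of a count-driven read loop (readAll is
a hypothetical helper):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.client.Mutation;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    // Drain `count` varint-delimited MutationProto records, as the
    // MutationSerialization deserializer consumes them one call at a time.
    static List<Mutation> readAll(InputStream in, int count) throws IOException {
      List<Mutation> out = new ArrayList<Mutation>(count);
      for (int i = 0; i < count; i++) {
        ClientProtos.MutationProto.Builder builder =
            ClientProtos.MutationProto.newBuilder();
        ProtobufUtil.mergeDelimitedFrom(builder, in);
        out.add(ProtobufUtil.toMutation(builder.build()));
      }
      return out;
    }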
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
index ef1e84f..86e9093 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.hbase.zookeeper.lock.ZKInterProcessReadWriteLock;
 import org.apache.zookeeper.KeeperException;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * A manager for distributed table level locks.
  */
@@ -213,10 +211,10 @@ public abstract class TableLockManager {
       return null;
     }
     try {
-      ZooKeeperProtos.TableLock data = ZooKeeperProtos.TableLock.newBuilder().mergeFrom(
-        bytes, pblen, bytes.length - pblen).build();
-      return data;
-    } catch (InvalidProtocolBufferException ex) {
+      ZooKeeperProtos.TableLock.Builder builder = ZooKeeperProtos.TableLock.newBuilder();
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      return builder.build();
+    } catch (IOException ex) {
       LOG.warn("Exception in deserialization", ex);
     }
     return null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index ee81503..f0dcdbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -7453,8 +7453,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
           " in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
     }

-    Message request = service.getRequestPrototype(methodDesc).newBuilderForType()
-        .mergeFrom(call.getRequest()).build();
+    Message.Builder builder = service.getRequestPrototype(methodDesc).newBuilderForType();
+    ProtobufUtil.mergeFrom(builder, call.getRequest());
+    Message request = builder.build();

     if (coprocessorHost != null) {
       request = coprocessorHost.preEndpointInvocation(service, methodName, request);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 33cd992..0b568aa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -3235,9 +3235,9 @@ public class HRegionServer extends HasThread implements
       throw new UnknownProtocolException(service.getClass(),
           "Unknown method " + methodName + " called on service " + serviceName);
     }
-    Message request =
-        service.getRequestPrototype(methodDesc).newBuilderForType().mergeFrom(call.getRequest())
-            .build();
+    Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
+    ProtobufUtil.mergeFrom(builderForType, call.getRequest());
+    Message request = builderForType.build();
     final Message.Builder responseBuilder =
         service.getResponsePrototype(methodDesc).newBuilderForType();
     service.callMethod(methodDesc, serviceController, request, new RpcCallback<Message>() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index af8b5bb..3ed9da0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
 
 import java.io.EOFException;
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -35,8 +34,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.io.LimitInputStream;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
@@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 
 import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * A Protobuf based WAL has the following structure:
@@ -330,9 +329,9 @@ public class ProtobufLogReader extends ReaderBase {
             "inputStream.available()= " + this.inputStream.available() + ", " +
             "entry size= " + size);
         }
-        final InputStream limitedInput = new LimitInputStream(this.inputStream, size);
-        builder.mergeFrom(limitedInput);
-      } catch (InvalidProtocolBufferException ipbe) {
+        ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
+          (int)size);
+      } catch (IOException ipbe) {
        throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
          originalPosition + ", currentPosition=" + this.inputStream.getPos() +
          ", messageSize=" + size + ", currentAvailable=" + available).initCause(ipbe);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index 4ccf4dc..887af0a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -72,7 +72,6 @@ import org.apache.hadoop.io.Text;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
-import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -594,11 +593,11 @@ public class AccessControlLists {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        AccessControlProtos.UsersAndPermissions perms =
-          AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
-            data, pblen, data.length - pblen).build();
-        return ProtobufUtil.toUserTablePermissions(perms);
-      } catch (InvalidProtocolBufferException e) {
+        AccessControlProtos.UsersAndPermissions.Builder builder =
+          AccessControlProtos.UsersAndPermissions.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        return ProtobufUtil.toUserTablePermissions(builder.build());
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     } else {
@@ -665,9 +664,13 @@ public class AccessControlLists {
       Tag tag = tagsIterator.next();
       if (tag.getType() == ACL_TAG_TYPE) {
         // Deserialize the table permissions from the KV
-        ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
-          AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
-            tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
+        // TODO: This can be improved. Don't build UsersAndPermissions just to unpack it again,
+        // use the builder
+        AccessControlProtos.UsersAndPermissions.Builder builder =
+          AccessControlProtos.UsersAndPermissions.newBuilder();
+        ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
+        ListMultimap<String,Permission> kvPerms =
+          ProtobufUtil.toUsersAndPermissions(builder.build());
         // Are there permissions for this user?
         List<Permission> userPerms = kvPerms.get(user.getShortName());
         if (userPerms != null) {
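AccessControlLists shows the read pattern that recurs through the rest of this
patch: check for the PB magic prefix, skip past it, merge the remainder into a
builder, and convert any IOException into a DeserializationException. Pulled
out of its surroundings, the recurring shape is roughly:

    // Illustrative pattern; SomeProtos.SomeMessage is a placeholder type.
    if (ProtobufUtil.isPBMagicPrefix(data)) {
      int pblen = ProtobufUtil.lengthOfPBMagic();
      SomeProtos.SomeMessage.Builder builder = SomeProtos.SomeMessage.newBuilder();
      try {
        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
      } catch (IOException e) {
        throw new DeserializationException(e);
      }
      return builder.build();
    }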
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 3c3af5a..f529731 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -1969,9 +1969,13 @@ public class AccessController extends BaseMasterAndRegionObserver
         tags.add(tag);
       } else {
         // Merge the perms from the older ACL into the current permission set
-        ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
-          AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
-            tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
+        // TODO: The efficiency of this can be improved. Don't build just to unpack
+        // again, use the builder
+        AccessControlProtos.UsersAndPermissions.Builder builder =
+          AccessControlProtos.UsersAndPermissions.newBuilder();
+        ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
+        ListMultimap<String,Permission> kvPerms =
+          ProtobufUtil.toUsersAndPermissions(builder.build());
         perms.putAll(kvPerms);
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
index fc6f0c7..92ea08a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
@@ -689,7 +689,8 @@ class HbaseObjectWritableFor96Migration implements Writable, WritableWithSize, C
       byte [] scanBytes = new byte[length];
       in.readFully(scanBytes);
       ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
-      instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
+      ProtobufUtil.mergeFrom(scanProto, scanBytes);
+      instance = ProtobufUtil.toScan(scanProto.build());
     } else {  // Writable or Serializable
       Class instanceClass = null;
       int b = (byte)WritableUtils.readVInt(in);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 774930d..c725b11 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -63,8 +63,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Utility method to support visibility
  */
@@ -131,10 +129,10 @@ public class VisibilityUtils {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder()
-          .mergeFrom(data, pblen, data.length - pblen).build();
-        return request.getVisLabelList();
-      } catch (InvalidProtocolBufferException e) {
+        VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        return builder.getVisLabelList();
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     }
@@ -152,10 +150,10 @@ public class VisibilityUtils {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        MultiUserAuthorizations multiUserAuths = MultiUserAuthorizations.newBuilder()
-          .mergeFrom(data, pblen, data.length - pblen).build();
-        return multiUserAuths;
-      } catch (InvalidProtocolBufferException e) {
+        MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        return builder.build();
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     }
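For reference, the PBUF-prefixed blobs these readers consume are produced on
the write side by ProtobufUtil#prependPBMagic, which this patch leaves
untouched; for example:

    // Write side (illustrative): serialize, then tag with the PB magic prefix
    // so readers can recognize protobuf content before merging it.
    byte[] bytes = ProtobufUtil.prependPBMagic(builder.build().toByteArray());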
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 4c54d27..8717948 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -82,7 +82,6 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.primitives.Ints;
-import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Utility methods for interacting with the underlying file system.
@@ -605,11 +604,10 @@ public abstract class FSUtils {
     int pblen = ProtobufUtil.lengthOfPBMagic();
     FSProtos.HBaseVersionFileContent.Builder builder =
       FSProtos.HBaseVersionFileContent.newBuilder();
-    FSProtos.HBaseVersionFileContent fileContent;
     try {
-      fileContent = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-      return fileContent.getVersion();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      return builder.getVersion();
+    } catch (IOException e) {
       // Convert
       throw new DeserializationException(e);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
index bcd7446..0a0d1ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -27,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.zookeeper.KeeperException;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Tracks the load balancer state up in ZK
 */
@@ -85,8 +85,8 @@ public class LoadBalancerTracker extends ZooKeeperNodeTracker {
       LoadBalancerProtos.LoadBalancerState.newBuilder();
     try {
       int magicLen = ProtobufUtil.lengthOfPBMagic();
-      builder.mergeFrom(pbBytes, magicLen, pbBytes.length - magicLen);
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, pbBytes, magicLen, pbBytes.length - magicLen);
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
index b9172bc..365010f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
@@ -87,7 +87,7 @@ public class RegionServerTracker extends ZooKeeperListener {
       byte[] data = ZKUtil.getData(watcher, nodePath);
       if (data != null && data.length > 0 && ProtobufUtil.isPBMagicPrefix(data)) {
         int magicLen = ProtobufUtil.lengthOfPBMagic();
-        rsInfoBuilder.mergeFrom(data, magicLen, data.length - magicLen);
+        ProtobufUtil.mergeFrom(rsInfoBuilder, data, magicLen, data.length - magicLen);
       }
       if (LOG.isDebugEnabled()) {
         LOG.debug("Added tracking of RS " + nodePath);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
index fb3d765..db00c14 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
+import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -37,8 +38,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.zookeeper.KeeperException;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Implementation of TableStateManager which reads, caches and sets state
  * up in ZooKeeper.  If multiple read/write clients, will make for confusion.
@@ -348,9 +347,9 @@ public class ZKTableStateManager implements TableStateManager {
       ProtobufUtil.expectPBMagicPrefix(data);
       ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
       int magicLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
-      return t.getState();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
+      return builder.getState();
+    } catch (IOException e) {
       KeeperException ke = new KeeperException.DataInconsistencyException();
       ke.initCause(e);
       throw ke;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b194052e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java
new file mode 100644
index 0000000..c90b12e
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java
@@ -0,0 +1,115 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapreduce;
+
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.experimental.categories.Category;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Result;
+
+import org.apache.hadoop.hbase.mapreduce.MutationSerialization;
+import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@Category(SmallTests.class)
+public class TestSerialization {
+  @Rule public TestName name = new TestName();
+  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static Configuration conf;
+  private static final byte [] row = Bytes.toBytes("row1");
+  private static final byte [] qualifier = Bytes.toBytes("qualifier1");
+  private static final byte [] family = Bytes.toBytes("family1");
+  private static final byte [] value = new byte[100 * 1024 * 1024];
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    conf = TEST_UTIL.getConfiguration();
+    conf.setInt("hbase.client.keyvalue.maxsize", Integer.MAX_VALUE);
+  }
+
+  @Test
+  public void testLargeMutation()
+  throws Exception {
+    Put put = new Put(row);
+    put.addColumn(family, qualifier, value);
+
+    MutationSerialization serialization = new MutationSerialization();
+    Serializer<Mutation> serializer = serialization.getSerializer(Mutation.class);
+    Deserializer<Mutation> deserializer = serialization.getDeserializer(Mutation.class);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    ByteArrayInputStream is = null;
+    try {
+      serializer.open(os);
+      serializer.serialize(put);
+      os.flush();
+      is = new ByteArrayInputStream(os.toByteArray());
+      deserializer.open(is);
+      deserializer.deserialize(null);
+    } catch (InvalidProtocolBufferException e) {
+      assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
+        e.getCause() instanceof InvalidProtocolBufferException);
+    } catch (Exception e) {
+      fail("Got an invalid exception: " + e);
+    }
+  }
+
+  @Test
+  public void testLargeResult()
+  throws Exception {
+    Result res = Result.create(new KeyValue[] {new KeyValue(row, family, qualifier, 0L, value)});
+
+    ResultSerialization serialization = new ResultSerialization();
+    Serializer<Result> serializer = serialization.getSerializer(Result.class);
+    Deserializer<Result> deserializer = serialization.getDeserializer(Result.class);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    ByteArrayInputStream is = null;
+    try {
+      serializer.open(os);
+      serializer.serialize(res);
+      os.flush();
+      is = new ByteArrayInputStream(os.toByteArray());
+      deserializer.open(is);
+      deserializer.deserialize(null);
+    } catch (InvalidProtocolBufferException e) {
+      assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
+        e.getCause() instanceof InvalidProtocolBufferException);
+    } catch (Exception e) {
+      fail("Got an invalid exception: " + e);
+    }
+  }
+}
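For context on how the new test's serializations get engaged outside the test:
MapReduce looks them up through Hadoop's io.serializations factory list, along
the lines of what TableMapReduceUtil configures for HBase jobs (illustrative,
not part of this patch):

    // Register the HBase serializations with Hadoop's serialization factory.
    Configuration conf = HBaseConfiguration.create();
    conf.setStrings("io.serializations", conf.get("io.serializations"),
      MutationSerialization.class.getName(), ResultSerialization.class.getName());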