From: zhangduo@apache.org
To: commits@hbase.apache.org
Date: Tue, 02 Jan 2018 02:01:09 -0000
Subject: [15/51] [abbrv] hbase git commit: Revert "HBASE-19651 Remove LimitInputStream"

Revert "HBASE-19651 Remove LimitInputStream"

This reverts commit 28eaf715e890d0491efce9dbdc126946deb25566.
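Context for the revert (not part of the patch itself): both call sites touched below parse a varint-length-prefixed protobuf message, and the change simply swaps commons-io's BoundedInputStream back to HBase's own LimitInputStream as the wrapper that keeps the parser from reading past one message's bytes. A minimal sketch of that pattern, using the unshaded protobuf-java CodedInputStream rather than the hbase-thirdparty shaded one, with an illustrative class and method name:

import java.io.IOException;
import java.io.InputStream;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;

import org.apache.hadoop.hbase.io.LimitInputStream;

public final class DelimitedParseSketch {
  private DelimitedParseSketch() {
  }

  /**
   * Reads one varint-length-prefixed message from the stream into the builder.
   * Returns false on a clean EOF before the length prefix.
   */
  public static boolean mergeOneDelimited(Message.Builder builder, InputStream in)
      throws IOException {
    final int firstByte = in.read();
    if (firstByte == -1) {
      return false;
    }
    // Decode the varint length that precedes the serialized message.
    final int size = CodedInputStream.readRawVarint32(firstByte, in);
    // Bound the stream so the parser cannot consume bytes past this message.
    final InputStream limitedInput = new LimitInputStream(in, size);
    final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
    codedInput.setSizeLimit(size);
    builder.mergeFrom(codedInput);
    return true;
  }
}

Functionally the two wrappers behave the same for this purpose: each makes the stream report EOF after size bytes so the parser stays inside a single message.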
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dae9d16
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dae9d16
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dae9d16

Branch: refs/heads/HBASE-19397
Commit: 2dae9d16e2fb4558c92e64e8c091fcfd086c48b9
Parents: b3f353c
Author: Michael Stack
Authored: Thu Dec 28 14:14:54 2017 -0800
Committer: Michael Stack
Committed: Thu Dec 28 14:31:44 2017 -0800

----------------------------------------------------------------------
 .../hbase/shaded/protobuf/ProtobufUtil.java     |   4 +-
 .../hadoop/hbase/io/LimitInputStream.java       | 105 +++++++++++++++++++
 .../regionserver/wal/ProtobufLogReader.java     |  10 +-
 3 files changed, 112 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dae9d16/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index cc30e53..7a5efb1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -38,7 +38,6 @@ import java.util.function.Function;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import org.apache.commons.io.input.BoundedInputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ByteBufferExtendedCell;
@@ -88,6 +87,7 @@ import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
 import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
@@ -2626,7 +2626,7 @@ public final class ProtobufUtil {
     final int firstByte = in.read();
     if (firstByte != -1) {
       final int size = CodedInputStream.readRawVarint32(firstByte, in);
-      final InputStream limitedInput = new BoundedInputStream(in, size);
+      final InputStream limitedInput = new LimitInputStream(in, size);
       final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
       codedInput.setSizeLimit(size);
       builder.mergeFrom(codedInput);


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dae9d16/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
new file mode 100644
index 0000000..6eb710a
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2007 The Guava Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hbase.io;
+
+import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Copied from guava source code v15 (LimitedInputStream)
+ * Guava deprecated LimitInputStream in v14 and removed it in v15. Copying this class here
+ * allows to be compatible with guava 11 to 15+.
+ */
+@InterfaceAudience.Private
+public final class LimitInputStream extends FilterInputStream {
+  private long left;
+  private long mark = -1;
+
+  public LimitInputStream(InputStream in, long limit) {
+    super(in);
+    checkNotNull(in);
+    checkArgument(limit >= 0, "limit must be non-negative");
+    left = limit;
+  }
+
+  @Override
+  public int available() throws IOException {
+    return (int) Math.min(in.available(), left);
+  }
+
+  // it's okay to mark even if mark isn't supported, as reset won't work
+  @Override
+  public synchronized void mark(int readLimit) {
+    in.mark(readLimit);
+    mark = left;
+  }
+
+  @Override
+  public int read() throws IOException {
+    if (left == 0) {
+      return -1;
+    }
+
+    int result = in.read();
+    if (result != -1) {
+      --left;
+    }
+    return result;
+  }
+
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException {
+    if (left == 0) {
+      return -1;
+    }
+
+    len = (int) Math.min(len, left);
+    int result = in.read(b, off, len);
+    if (result != -1) {
+      left -= result;
+    }
+    return result;
+  }
+
+  @Override
+  public synchronized void reset() throws IOException {
+    if (!in.markSupported()) {
+      throw new IOException("Mark not supported");
+    }
+    if (mark == -1) {
+      throw new IOException("Mark not set");
+    }
+
+    in.reset();
+    left = mark;
+  }
+
+  @Override
+  public long skip(long n) throws IOException {
+    n = Math.min(n, left);
+    long skipped = in.skip(n);
+    left -= skipped;
+    return skipped;
+  }
+}


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dae9d16/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index 7babb55..cba2c61 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -26,12 +26,15 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.io.input.BoundedInputStream;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
@@ -40,9 +43,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;
 import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
@@ -350,7 +350,7 @@ public class ProtobufLogReader extends ReaderBase {
           "inputStream.available()= " + this.inputStream.available() + ", " +
           "entry size= " + size + " at offset = " + this.inputStream.getPos());
       }
-      ProtobufUtil.mergeFrom(builder, new BoundedInputStream(this.inputStream, size),
+      ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
        (int)size);
     } catch (InvalidProtocolBufferException ipbe) {
       throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
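For anyone picking this back up, a tiny standalone example of what the restored class does (illustrative only, not part of the commit; the byte values are made up). The wrapper reports EOF once the limit is spent, while the underlying stream keeps its remaining bytes:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.hbase.io.LimitInputStream;

public final class LimitInputStreamDemo {
  public static void main(String[] args) throws IOException {
    byte[] data = {10, 20, 30, 40, 50};
    InputStream in = new ByteArrayInputStream(data);

    // Only the first three bytes are visible through the wrapper.
    InputStream limited = new LimitInputStream(in, 3);
    System.out.println(limited.read()); // 10
    System.out.println(limited.read()); // 20
    System.out.println(limited.read()); // 30
    System.out.println(limited.read()); // -1: limit reached although 'in' still has bytes

    // The underlying stream continues right after what the wrapper consumed.
    System.out.println(in.read());      // 40
  }
}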