You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by zh...@apache.org on 2018/01/02 02:01:09 UTC
[15/51] [abbrv] hbase git commit: Revert "HBASE-19651 Remove
LimitInputStream"
Revert "HBASE-19651 Remove LimitInputStream"
This reverts commit 28eaf715e890d0491efce9dbdc126946deb25566.
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dae9d16
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dae9d16
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dae9d16
Branch: refs/heads/HBASE-19397
Commit: 2dae9d16e2fb4558c92e64e8c091fcfd086c48b9
Parents: b3f353c
Author: Michael Stack <st...@apache.org>
Authored: Thu Dec 28 14:14:54 2017 -0800
Committer: Michael Stack <st...@apache.org>
Committed: Thu Dec 28 14:31:44 2017 -0800
----------------------------------------------------------------------
.../hbase/shaded/protobuf/ProtobufUtil.java | 4 +-
.../hadoop/hbase/io/LimitInputStream.java | 105 +++++++++++++++++++
.../regionserver/wal/ProtobufLogReader.java | 10 +-
3 files changed, 112 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/2dae9d16/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index cc30e53..7a5efb1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -38,7 +38,6 @@ import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
-import org.apache.commons.io.input.BoundedInputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferExtendedCell;
@@ -88,6 +87,7 @@ import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
@@ -2626,7 +2626,7 @@ public final class ProtobufUtil {
final int firstByte = in.read();
if (firstByte != -1) {
final int size = CodedInputStream.readRawVarint32(firstByte, in);
- final InputStream limitedInput = new BoundedInputStream(in, size);
+ final InputStream limitedInput = new LimitInputStream(in, size);
final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
codedInput.setSizeLimit(size);
builder.mergeFrom(codedInput);
http://git-wip-us.apache.org/repos/asf/hbase/blob/2dae9d16/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
new file mode 100644
index 0000000..6eb710a
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2007 The Guava Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hbase.io;
+
+import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Copied from guava source code v15 (LimitedInputStream)
+ * Guava deprecated LimitInputStream in v14 and removed it in v15. Copying this class here
+ * allows HBase to remain compatible with guava 11 to 15+.
+ */
+@InterfaceAudience.Private
public final class LimitInputStream extends FilterInputStream {
  // Bytes that may still be read before the imposed limit is exhausted.
  private long remaining;
  // Snapshot of {@code remaining} taken at the last mark(); -1 means no mark set.
  private long markedRemaining = -1;

  /**
   * Wraps {@code in} so that at most {@code limit} bytes can be read through this stream.
   *
   * @param in the underlying stream; must not be null
   * @param limit maximum number of bytes readable via this wrapper; must be non-negative
   */
  public LimitInputStream(InputStream in, long limit) {
    super(in);
    if (in == null) {
      throw new NullPointerException();
    }
    if (limit < 0) {
      throw new IllegalArgumentException("limit must be non-negative");
    }
    remaining = limit;
  }

  /** Availability is the smaller of what the delegate reports and the remaining allowance. */
  @Override
  public int available() throws IOException {
    long fromDelegate = in.available();
    return (int) (fromDelegate < remaining ? fromDelegate : remaining);
  }

  // Marking is forwarded even when the delegate does not support it;
  // reset() is where an unsupported mark is rejected.
  @Override
  public synchronized void mark(int readLimit) {
    in.mark(readLimit);
    markedRemaining = remaining;
  }

  /** Reads one byte, or returns -1 once the limit (or the delegate) is exhausted. */
  @Override
  public int read() throws IOException {
    if (remaining == 0) {
      return -1;
    }
    int value = in.read();
    if (value != -1) {
      remaining--;
    }
    return value;
  }

  /** Bulk read, never handing the delegate a request larger than the remaining allowance. */
  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    if (remaining == 0) {
      return -1;
    }
    int toRead = (int) Math.min((long) len, remaining);
    int count = in.read(b, off, toRead);
    if (count != -1) {
      remaining -= count;
    }
    return count;
  }

  /** Restores the stream and the byte allowance to their state at the last mark(). */
  @Override
  public synchronized void reset() throws IOException {
    if (!in.markSupported()) {
      throw new IOException("Mark not supported");
    }
    if (markedRemaining == -1) {
      throw new IOException("Mark not set");
    }
    in.reset();
    remaining = markedRemaining;
  }

  /** Skips at most the remaining allowance; only bytes actually skipped are charged. */
  @Override
  public long skip(long n) throws IOException {
    long capped = Math.min(n, remaining);
    long skipped = in.skip(capped);
    remaining -= skipped;
    return skipped;
  }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/2dae9d16/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index 7babb55..cba2c61 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -26,12 +26,15 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.io.input.BoundedInputStream;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
@@ -40,9 +43,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
@@ -350,7 +350,7 @@ public class ProtobufLogReader extends ReaderBase {
"inputStream.available()= " + this.inputStream.available() + ", " +
"entry size= " + size + " at offset = " + this.inputStream.getPos());
}
- ProtobufUtil.mergeFrom(builder, new BoundedInputStream(this.inputStream, size),
+ ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
(int)size);
} catch (InvalidProtocolBufferException ipbe) {
throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +