You are viewing a plain-text version of this content. The canonical (HTML) version is available at the archive link for this message.
Posted to commits@hbase.apache.org by sy...@apache.org on 2016/02/13 21:10:31 UTC
[20/22] hbase git commit: HBASE-9393 HBase does not close a closed
socket, resulting in many connections stuck in CLOSE_WAIT
HBASE-9393 HBase does not close a closed socket, resulting in many connections stuck in CLOSE_WAIT
Signed-off-by: stack <st...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c8d13318
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c8d13318
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c8d13318
Branch: refs/heads/hbase-12439
Commit: c8d133186b85a4e7298bab3376ad96899860b1c7
Parents: 454e45f
Author: Ashish Singhi <as...@huawei.com>
Authored: Thu Jan 28 14:23:26 2016 +0530
Committer: stack <st...@apache.org>
Committed: Fri Feb 12 13:41:09 2016 -0800
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/io/hfile/HFile.java | 34 ++++++++++++++++----
.../hadoop/hbase/io/hfile/HFileBlock.java | 20 +++++++++++-
.../hadoop/hbase/io/hfile/HFileReaderImpl.java | 9 ++++++
3 files changed, 56 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/c8d13318/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index 1e1835f..03d681e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -41,6 +41,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CanUnbuffer;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -474,6 +475,11 @@ public class HFile {
@VisibleForTesting
boolean prefetchComplete();
+
+ /**
+ * To close only the stream's socket. HBASE-9393
+ */
+ void unbufferStream();
}
/**
@@ -490,8 +496,8 @@ public class HFile {
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",
justification="Intentional")
- private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,
- long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {
+ private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,
+ CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {
FixedFileTrailer trailer = null;
try {
boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();
@@ -513,6 +519,22 @@ public class HFile {
LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);
}
throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);
+ } finally {
+ unbufferStream(fsdis);
+ }
+ }
+
+ static void unbufferStream(FSDataInputStreamWrapper fsdis) {
+ boolean useHBaseChecksum = fsdis.shouldUseHBaseChecksum();
+ final FSDataInputStream stream = fsdis.getStream(useHBaseChecksum);
+ if (stream != null && stream.getWrappedStream() instanceof CanUnbuffer) {
+ // Enclosing unbuffer() in try-catch just to be on defensive side.
+ try {
+ stream.unbuffer();
+ } catch (Throwable e) {
+ LOG.error("Failed to unbuffer the stream so possibly there may be a TCP socket connection "
+ + "left open in CLOSE_WAIT state.", e);
+ }
}
}
@@ -541,7 +563,7 @@ public class HFile {
} else {
hfs = (HFileSystem)fs;
}
- return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);
+ return openReader(path, fsdis, size, cacheConf, hfs, conf);
}
/**
@@ -556,8 +578,8 @@ public class HFile {
FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {
Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");
FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);
- return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),
- cacheConf, stream.getHfs(), conf);
+ return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf, stream.getHfs(),
+ conf);
}
/**
@@ -567,7 +589,7 @@ public class HFile {
FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)
throws IOException {
FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);
- return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);
+ return openReader(path, wrapper, size, cacheConf, null, conf);
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/c8d13318/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index e7a1e5e..79b3e1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -33,10 +33,10 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.fs.HFileSystem;
-import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;
+import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
@@ -1311,6 +1311,11 @@ public class HFileBlock implements Cacheable {
void setIncludesMemstoreTS(boolean includesMemstoreTS);
void setDataBlockEncoder(HFileDataBlockEncoder encoder);
+
+ /**
+ * To close only the stream's socket. HBASE-9393
+ */
+ void unbufferStream();
}
/**
@@ -1758,6 +1763,19 @@ public class HFileBlock implements Cacheable {
public String toString() {
return "hfs=" + hfs + ", path=" + pathName + ", fileContext=" + fileContext;
}
+
+ @Override
+ public void unbufferStream() {
+ // To handle concurrent reads, ensure that no other client is accessing the streams while we
+ // unbuffer it.
+ if (streamLock.tryLock()) {
+ try {
+ HFile.unbufferStream(this.streamWrapper);
+ } finally {
+ streamLock.unlock();
+ }
+ }
+ }
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/c8d13318/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index b2f5ded..f676e60 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -575,6 +575,10 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
@Override
public void close() {
+ if (!pread) {
+ // For seek + pread stream socket should be closed when the scanner is closed. HBASE-9393
+ reader.unbufferStream();
+ }
this.returnBlocks(true);
}
@@ -1898,4 +1902,9 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
public int getMajorVersion() {
return 3;
}
+
+ @Override
+ public void unbufferStream() {
+ fsBlockReader.unbufferStream();
+ }
}