Posted to commits@storm.apache.org by bo...@apache.org on 2017/09/19 19:59:34 UTC

[1/2] storm git commit: Fix FD leak

Repository: storm
Updated Branches:
  refs/heads/master 95c9524ac -> f7443a822


Fix FD leak


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/1e81a907
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/1e81a907
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/1e81a907

Branch: refs/heads/master
Commit: 1e81a907fd3722601e3d1e765baf4a9360b95e99
Parents: da2f035
Author: Kyle Nusbaum <kn...@yahoo-inc.com>
Authored: Mon Sep 18 15:06:07 2017 -0500
Committer: Kyle Nusbaum <kn...@yahoo-inc.com>
Committed: Mon Sep 18 15:06:07 2017 -0500

----------------------------------------------------------------------
 .../handler/LogviewerLogSearchHandler.java      | 140 ++++++++++---------
 1 file changed, 73 insertions(+), 67 deletions(-)
----------------------------------------------------------------------
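
For context on this change: the previous version of the search path opened a
FileInputStream for the log file (wrapped in a GZIPInputStream for .gz files)
and never closed it, so every request leaked a file descriptor whether the
search returned normally or threw. The patch wraps the search in try/finally
and closes the stream in the finally block; closing the GZIPInputStream also
closes the FileInputStream it wraps. The sketch below shows that try/finally
pattern in isolation; the class and member names (GrepSketch, searchFile,
needle) are placeholders for illustration, not Storm's actual code.

    import java.io.BufferedInputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.GZIPInputStream;

    class GrepSketch {
        // Minimal sketch of the resource-closing pattern this commit applies.
        static String searchFile(File file, String needle) throws IOException {
            boolean isZipFile = file.getName().endsWith(".gz");

            InputStream in = null;
            try {
                FileInputStream fis = new FileInputStream(file);
                // For .gz files the raw stream is wrapped; closing the wrapper
                // closes fis as well.
                in = isZipFile ? new GZIPInputStream(fis) : fis;
                BufferedInputStream stream = new BufferedInputStream(in);

                // ... scan 'stream' for 'needle' and build the response ...
                return "matches for " + needle;
            } finally {
                // Runs on every exit path (normal return or exception), so the
                // file descriptor is always released.
                if (in != null) {
                    in.close();
                }
            }
        }
    }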


http://git-wip-us.apache.org/repos/asf/storm/blob/1e81a907/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandler.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandler.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandler.java
index 76f1683..ffa1bc9 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandler.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandler.java
@@ -272,90 +272,96 @@ public class LogviewerLogSearchHandler {
             }
 
             boolean isZipFile = file.getName().endsWith(".gz");
-            FileInputStream fis = new FileInputStream(file);
-            InputStream gzippedInputStream;
-            if (isZipFile) {
-                gzippedInputStream = new GZIPInputStream(fis);
-            } else {
-                gzippedInputStream = fis;
-            }
 
-            BufferedInputStream stream = new BufferedInputStream(gzippedInputStream);
+            InputStream gzippedInputStream = null;
+            try {
+                FileInputStream fis = new FileInputStream(file);
+                if (isZipFile) {
+                    gzippedInputStream = new GZIPInputStream(fis);
+                } else {
+                    gzippedInputStream = fis;
+                }
 
-            int fileLength;
-            if (isZipFile) {
-                fileLength = (int) ServerUtils.zipFileSize(file);
-            } else {
-                fileLength = (int) file.length();
-            }
+                BufferedInputStream stream = new BufferedInputStream(gzippedInputStream);
 
-            ByteBuffer buf = ByteBuffer.allocate(GREP_BUF_SIZE);
-            final byte[] bufArray = buf.array();
-            final byte[] searchBytes = searchString.getBytes("UTF-8");
-            numMatches = numMatches != null ? numMatches : 10;
-            startByteOffset = startByteOffset != null ? startByteOffset : 0;
+                int fileLength;
+                if (isZipFile) {
+                    fileLength = (int) ServerUtils.zipFileSize(file);
+                } else {
+                    fileLength = (int) file.length();
+                }
 
-            // Start at the part of the log file we are interested in.
-            // Allow searching when start-byte-offset == file-len so it doesn't blow up on 0-length files
-            if (startByteOffset > fileLength) {
-                throw new InvalidRequestException("Cannot search past the end of the file");
-            }
+                ByteBuffer buf = ByteBuffer.allocate(GREP_BUF_SIZE);
+                final byte[] bufArray = buf.array();
+                final byte[] searchBytes = searchString.getBytes("UTF-8");
+                numMatches = numMatches != null ? numMatches : 10;
+                startByteOffset = startByteOffset != null ? startByteOffset : 0;
 
-            if (startByteOffset > 0) {
-                StreamUtil.skipBytes(stream, startByteOffset);
-            }
+                // Start at the part of the log file we are interested in.
+                // Allow searching when start-byte-offset == file-len so it doesn't blow up on 0-length files
+                if (startByteOffset > fileLength) {
+                    throw new InvalidRequestException("Cannot search past the end of the file");
+                }
 
-            Arrays.fill(bufArray, (byte) 0);
+                if (startByteOffset > 0) {
+                    StreamUtil.skipBytes(stream, startByteOffset);
+                }
 
-            int totalBytesRead = 0;
-            int bytesRead = stream.read(bufArray, 0, Math.min((int) fileLength, GREP_BUF_SIZE));
-            buf.limit(bytesRead);
-            totalBytesRead += bytesRead;
+                Arrays.fill(bufArray, (byte) 0);
 
-            List<Map<String, Object>> initialMatches = new ArrayList<>();
-            int initBufOffset = 0;
-            int byteOffset = startByteOffset;
-            byte[] beforeBytes = null;
+                int totalBytesRead = 0;
+                int bytesRead = stream.read(bufArray, 0, Math.min((int) fileLength, GREP_BUF_SIZE));
+                buf.limit(bytesRead);
+                totalBytesRead += bytesRead;
 
-            Map<String, Object> ret = new HashMap<>();
-            while (true) {
-                SubstringSearchResult searchRet = bufferSubstringSearch(isDaemon, file, fileLength, byteOffset, initBufOffset,
-                        stream, startByteOffset, totalBytesRead, buf, searchBytes, initialMatches, numMatches, beforeBytes);
+                List<Map<String, Object>> initialMatches = new ArrayList<>();
+                int initBufOffset = 0;
+                int byteOffset = startByteOffset;
+                byte[] beforeBytes = null;
 
-                List<Map<String, Object>> matches = searchRet.getMatches();
-                Integer newByteOffset = searchRet.getNewByteOffset();
-                byte[] newBeforeBytes = searchRet.getNewBeforeBytes();
+                Map<String, Object> ret = new HashMap<>();
+                while (true) {
+                    SubstringSearchResult searchRet = bufferSubstringSearch(isDaemon, file, fileLength, byteOffset, initBufOffset,
+                            stream, startByteOffset, totalBytesRead, buf, searchBytes, initialMatches, numMatches, beforeBytes);
 
-                if (matches.size() < numMatches && totalBytesRead + startByteOffset < fileLength) {
-                    // The start index is positioned to find any possible
-                    // occurrence search string that did not quite fit in the
-                    // buffer on the previous read.
-                    final int newBufOffset = Math.min(buf.limit(), GREP_MAX_SEARCH_SIZE) - searchBytes.length;
+                    List<Map<String, Object>> matches = searchRet.getMatches();
+                    Integer newByteOffset = searchRet.getNewByteOffset();
+                    byte[] newBeforeBytes = searchRet.getNewBeforeBytes();
 
-                    totalBytesRead = rotateGrepBuffer(buf, stream, totalBytesRead, file, fileLength);
-                    if (totalBytesRead < 0) {
-                        throw new InvalidRequestException("Cannot search past the end of the file");
-                    }
+                    if (matches.size() < numMatches && totalBytesRead + startByteOffset < fileLength) {
+                        // The start index is positioned to find any possible
+                        // occurrence search string that did not quite fit in the
+                        // buffer on the previous read.
+                        final int newBufOffset = Math.min(buf.limit(), GREP_MAX_SEARCH_SIZE) - searchBytes.length;
 
-                    initialMatches = matches;
-                    initBufOffset = newBufOffset;
-                    byteOffset = newByteOffset;
-                    beforeBytes = newBeforeBytes;
-                } else {
-                    ret.put("isDaemon", isDaemon ? "yes" : "no");
-                    Integer nextByteOffset = null;
-                    if (matches.size() >= numMatches || totalBytesRead < fileLength) {
-                        nextByteOffset = (Integer) last(matches).get("byteOffset") + searchBytes.length;
-                        if (fileLength <= nextByteOffset) {
-                            nextByteOffset = null;
+                        totalBytesRead = rotateGrepBuffer(buf, stream, totalBytesRead, file, fileLength);
+                        if (totalBytesRead < 0) {
+                            throw new InvalidRequestException("Cannot search past the end of the file");
+                        }
+
+                        initialMatches = matches;
+                        initBufOffset = newBufOffset;
+                        byteOffset = newByteOffset;
+                        beforeBytes = newBeforeBytes;
+                    } else {
+                        ret.put("isDaemon", isDaemon ? "yes" : "no");
+                        Integer nextByteOffset = null;
+                        if (matches.size() >= numMatches || totalBytesRead < fileLength) {
+                            nextByteOffset = (Integer) last(matches).get("byteOffset") + searchBytes.length;
+                            if (fileLength <= nextByteOffset) {
+                                nextByteOffset = null;
+                            }
                         }
+                        ret.putAll(mkGrepResponse(searchBytes, startByteOffset, matches, nextByteOffset));
+                        break;
                     }
-                    ret.putAll(mkGrepResponse(searchBytes, startByteOffset, matches, nextByteOffset));
-                    break;
+                }
+                return ret;
+            } finally {
+                if (gzippedInputStream != null) {
+                    gzippedInputStream.close();
                 }
             }
-
-            return ret;
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
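
A side note on the pattern: on Java 7 and later, the same guarantee can also
be written with try-with-resources, which closes each declared resource
automatically on every exit path. This is only an illustrative alternative
shape, not what the commit does; the names below are placeholders.

    import java.io.BufferedInputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.GZIPInputStream;

    class TryWithResourcesSketch {
        static void searchFile(File file) throws IOException {
            boolean isZipFile = file.getName().endsWith(".gz");
            // Each declared resource is closed automatically in reverse order,
            // even if an exception is thrown while reading.
            try (FileInputStream fis = new FileInputStream(file);
                 InputStream in = isZipFile ? new GZIPInputStream(fis) : fis;
                 BufferedInputStream stream = new BufferedInputStream(in)) {
                // ... perform the search using 'stream' ...
            }
        }
    }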


[2/2] storm git commit: Merge branch 'STORM-2742' of https://github.com/knusbaum/incubator-storm into STORM-2742

Posted by bo...@apache.org.
Merge branch 'STORM-2742' of https://github.com/knusbaum/incubator-storm into STORM-2742

STORM-2742: Logviewer leaking file descriptors

This closes #2330


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/f7443a82
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/f7443a82
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/f7443a82

Branch: refs/heads/master
Commit: f7443a8224b23e648117eb56e03b4b03f80398cb
Parents: 95c9524 1e81a90
Author: Robert Evans <ev...@yahoo-inc.com>
Authored: Tue Sep 19 14:43:00 2017 -0500
Committer: Robert Evans <ev...@yahoo-inc.com>
Committed: Tue Sep 19 14:43:00 2017 -0500

----------------------------------------------------------------------
 .../handler/LogviewerLogSearchHandler.java      | 140 ++++++++++---------
 1 file changed, 73 insertions(+), 67 deletions(-)
----------------------------------------------------------------------