Posted to commits@lucene.apache.org by so...@apache.org on 2020/06/03 19:13:41 UTC

[lucene-solr] 12/47: LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search

This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 17592d28a1daa70fe6158d07001ee02c7c2b0780
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Thu May 21 20:29:18 2020 -0400

    LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search
---
 lucene/CHANGES.txt                                 |  3 +++
 .../org/apache/lucene/search/TestFuzzyQuery.java   |  5 ++--
 .../apache/lucene/search/TestLRUQueryCache.java    | 17 ++++++++------
 .../lucene/search/TestSameScoresWithThreads.java   |  1 +
 .../apache/lucene/search/TestSearcherManager.java  |  1 +
 .../org/apache/lucene/search/TestTermQuery.java    |  7 +++++-
 .../search/uhighlight/UnifiedHighlighter.java      |  2 ++
 .../lucene/search/highlight/HighlighterTest.java   | 27 +++++++++++-----------
 .../lucene/search/highlight/TokenSourcesTest.java  |  2 ++
 .../highlight/custom/HighlightCustomQueryTest.java | 23 +++++++++---------
 .../lucene/search/TestTermAutomatonQuery.java      |  3 ++-
 .../suggest/analyzing/TestFreeTextSuggester.java   |  1 +
 .../suggest/analyzing/TestSuggestStopFilter.java   |  9 --------
 .../search/suggest/document/TestSuggestField.java  |  2 +-
 .../lucene/search/ShardSearchingTestBase.java      |  3 +--
 15 files changed, 59 insertions(+), 47 deletions(-)
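
Most of the fixes below share one shape: once a test has obtained a DirectoryReader from its writer, the IndexWriter is closed immediately instead of being carried to the end of the test, which is what the precommit resource-leak check wants to see. A minimal, self-contained sketch of that shape follows; the class and field names are illustrative and not taken from the patch:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field.Store;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    public class CloseWriterEarly {
      public static void main(String[] args) throws Exception {
        Directory dir = new ByteBuffersDirectory();
        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
        Document doc = new Document();
        doc.add(new StringField("field", "Giga byte", Store.YES));
        w.addDocument(doc);

        DirectoryReader r = DirectoryReader.open(w); // near-real-time reader sees the pending doc
        w.close();  // the reader stays valid; closing here is what the resource-leak check expects
        IndexSearcher searcher = new IndexSearcher(r);
        System.out.println(searcher.count(new TermQuery(new Term("field", "Giga byte"))));
        r.close();
        dir.close();
      }
    }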

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index ab88bcb..46c7063 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -286,6 +286,9 @@ Build
 
 * Upgrade forbiddenapis to version 3.0.  (Uwe Schindler)
 
+* LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search
+  (Andras Salamon via Erick Erickson)
+
 ======================= Lucene 8.5.1 =======================
 
 Bug Fixes
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java
index 99376e3..b188612 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java
@@ -411,7 +411,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
   
   public void testGiga() throws Exception {
 
-    MockAnalyzer analyzer = new MockAnalyzer(random());
     Directory index = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), index);
 
@@ -443,6 +442,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
     assertEquals(1, hits.length);
     assertEquals("Giga byte", searcher.doc(hits[0].doc).get("field"));
     r.close();
+    w.close();
     index.close();
   }
   
@@ -561,6 +561,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
       w.addDocument(doc);
     }
     DirectoryReader r = w.getReader();
+    w.close();
     //System.out.println("TEST: reader=" + r);
     IndexSearcher s = newSearcher(r);
     int iters = atLeast(200);
@@ -638,7 +639,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
       }
     }
     
-    IOUtils.close(r, w, dir);
+    IOUtils.close(r, dir);
   }
 
   private static class TermAndScore implements Comparable<TermAndScore> {
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index 7993beb..ef02375 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -181,14 +181,17 @@ public class TestLRUQueryCache extends LuceneTestCase {
       thread.join();
     }
 
-    if (error.get() != null) {
-      throw error.get();
+    try {
+      if (error.get() != null) {
+        throw error.get();
+      }
+      queryCache.assertConsistent();
+    } finally {
+      mgr.close();
+      w.close();
+      dir.close();
+      queryCache.assertConsistent();
     }
-    queryCache.assertConsistent();
-    mgr.close();
-    w.close();
-    dir.close();
-    queryCache.assertConsistent();
   }
 
   public void testLRUEviction() throws Exception {
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java b/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java
index a615a6a..4b284dfd 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java
@@ -119,6 +119,7 @@ public class TestSameScoresWithThreads extends LuceneTestCase {
         thread.join();
       }
     }
+    docs.close();
     r.close();
     dir.close();
   }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
index b923866..1d8edcc 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
@@ -310,6 +310,7 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()));
+    @SuppressWarnings("resource")
     SearcherManager sm = new SearcherManager(writer, false, false, new SearcherFactory());
     writer.addDocument(new Document());
     writer.commit();
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
index e460e26..65986d9 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.CompositeReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.FilterLeafReader;
@@ -47,9 +48,13 @@ public class TestTermQuery extends LuceneTestCase {
     QueryUtils.checkUnequal(
         new TermQuery(new Term("foo", "bar")),
         new TermQuery(new Term("foo", "baz")));
+    final CompositeReaderContext context;
+    try (MultiReader multiReader = new MultiReader()) {
+      context = multiReader.getContext();
+    }
     QueryUtils.checkEqual(
         new TermQuery(new Term("foo", "bar")),
-        new TermQuery(new Term("foo", "bar"), TermStates.build(new MultiReader().getContext(), new Term("foo", "bar"), true)));
+        new TermQuery(new Term("foo", "bar"), TermStates.build(context, new Term("foo", "bar"), true)));
   }
 
   public void testCreateWeightDoesNotSeekIfScoresAreNotNeeded() throws IOException {
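
In the hunk above, the MultiReader is only needed long enough to hand an IndexReaderContext to TermStates.build, so the try-with-resources block closes it right away; with no sub-readers there are no leaves to visit, so the closed reader is never touched afterwards. Roughly, as a standalone sketch rather than the test's code:

    import org.apache.lucene.index.CompositeReaderContext;
    import org.apache.lucene.index.MultiReader;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.TermStates;

    public class EmptyContextSketch {
      public static void main(String[] args) throws Exception {
        final CompositeReaderContext context;
        try (MultiReader empty = new MultiReader()) { // no sub-readers, so nothing to leak
          context = empty.getContext();
        }
        // With zero leaves there is nothing to seek, so building TermStates is safe here.
        TermStates states = TermStates.build(context, new Term("foo", "bar"), true);
        System.out.println(states);
      }
    }
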
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
index 74de248..5d0dc94 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
@@ -61,6 +61,7 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InPlaceMergeSorter;
 
 /**
@@ -643,6 +644,7 @@ public class UnifiedHighlighter {
 
       batchDocIdx += fieldValsByDoc.size();
     }
+    IOUtils.close(indexReaderWithTermVecCache);
     assert docIdIter.docID() == DocIdSetIterator.NO_MORE_DOCS
         || docIdIter.nextDoc() == DocIdSetIterator.NO_MORE_DOCS;
 
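One note on the lone non-test change above: indexReaderWithTermVecCache is only created when it is actually needed, so it may be null at this point. IOUtils.close(Closeable...) skips null arguments and, if a close fails, rethrows the first exception only after attempting every close, which is why it is used instead of a bare close() call. A tiny sketch of that idiom, with names invented for illustration:

    import java.io.Closeable;
    import java.io.IOException;
    import org.apache.lucene.util.IOUtils;

    public class CloseMaybeNull {
      // Hypothetical helper: either argument may be null; IOUtils.close ignores nulls.
      static void release(Closeable maybeNullReader, Closeable directory) throws IOException {
        IOUtils.close(maybeNullReader, directory);
      }

      public static void main(String[] args) throws IOException {
        release(null, null); // completes without a NullPointerException
      }
    }
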
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index 0c3a0f6..2e70317 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -1362,24 +1362,25 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
       public void run() throws Exception {
         HashMap<String,String> synonyms = new HashMap<>();
         synonyms.put("football", "soccer,footie");
-        Analyzer analyzer = new SynonymAnalyzer(synonyms);
+        try (Analyzer analyzer = new SynonymAnalyzer(synonyms)) {
 
-        String s = "football-soccer in the euro 2004 footie competition";
+          String s = "football-soccer in the euro 2004 footie competition";
 
-        BooleanQuery.Builder query = new BooleanQuery.Builder();
-        query.add(new TermQuery(new Term("bookid", "football")), Occur.SHOULD);
-        query.add(new TermQuery(new Term("bookid", "soccer")), Occur.SHOULD);
-        query.add(new TermQuery(new Term("bookid", "footie")), Occur.SHOULD);
+          BooleanQuery.Builder query = new BooleanQuery.Builder();
+          query.add(new TermQuery(new Term("bookid", "football")), Occur.SHOULD);
+          query.add(new TermQuery(new Term("bookid", "soccer")), Occur.SHOULD);
+          query.add(new TermQuery(new Term("bookid", "footie")), Occur.SHOULD);
 
-        Highlighter highlighter = getHighlighter(query.build(), null, HighlighterTest.this);
+          Highlighter highlighter = getHighlighter(query.build(), null, HighlighterTest.this);
 
-        // Get 3 best fragments and separate with a "..."
-        TokenStream tokenStream = analyzer.tokenStream(null, s);
+          // Get 3 best fragments and separate with a "..."
+          TokenStream tokenStream = analyzer.tokenStream(null, s);
 
-        String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
-        String expectedResult = "<B>football</B>-<B>soccer</B> in the euro 2004 <B>footie</B> competition";
-        assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
-            + " actual:" + result, expectedResult.equals(result));
+          String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
+          String expectedResult = "<B>football</B>-<B>soccer</B> in the euro 2004 <B>footie</B> competition";
+          assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
+              + " actual:" + result, expectedResult.equals(result));
+        }
       }
 
     };
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
index 825133c..30cf711 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
@@ -391,6 +391,7 @@ public class TokenSourcesTest extends BaseTokenStreamTestCase {
       if (startOffsets[i] == startOffsets[i-1]) {
         if (VERBOSE)
           System.out.println("Skipping test because can't easily validate random token-stream is correct.");
+        rTokenStream.close();
         return;
       }
     }
@@ -438,6 +439,7 @@ public class TokenSourcesTest extends BaseTokenStreamTestCase {
 
     reader.close();
     dir.close();
+    rTokenStream.close();
   }
 
   public void testMaxStartOffsetConsistency() throws IOException {
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
index b8ce3dd..115a51a9 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
@@ -105,17 +105,18 @@ public class HighlightCustomQueryTest extends LuceneTestCase {
    */
   private String highlightField(Query query, String fieldName,
       String text) throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new MockAnalyzer(random(), MockTokenizer.SIMPLE,
-        true, MockTokenFilter.ENGLISH_STOPSET).tokenStream(fieldName, text);
-    // Assuming "<B>", "</B>" used to highlight
-    SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
-    MyQueryScorer scorer = new MyQueryScorer(query, fieldName, FIELD_NAME);
-    Highlighter highlighter = new Highlighter(formatter, scorer);
-    highlighter.setTextFragmenter(new SimpleFragmenter(Integer.MAX_VALUE));
-
-    String rv = highlighter.getBestFragments(tokenStream, text, 1,
-        "(FIELD TEXT TRUNCATED)");
-    return rv.length() == 0 ? text : rv;
+    try (MockAnalyzer mockAnalyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE,true,
+        MockTokenFilter.ENGLISH_STOPSET); TokenStream tokenStream = mockAnalyzer.tokenStream(fieldName, text)) {
+      // Assuming "<B>", "</B>" used to highlight
+      SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
+      MyQueryScorer scorer = new MyQueryScorer(query, fieldName, FIELD_NAME);
+      Highlighter highlighter = new Highlighter(formatter, scorer);
+      highlighter.setTextFragmenter(new SimpleFragmenter(Integer.MAX_VALUE));
+
+      String rv = highlighter.getBestFragments(tokenStream, text, 1,
+          "(FIELD TEXT TRUNCATED)");
+      return rv.length() == 0 ? text : rv;
+    }
   }
 
   public static class MyWeightedSpanTermExtractor extends
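
The rewrite above chains the analyzer and its token stream in a single try-with-resources header, so both are closed (in reverse declaration order) even if highlighting throws. The same shape in isolation, with WhitespaceAnalyzer standing in for the test's MockAnalyzer:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    public class TwoResourceTry {
      public static void main(String[] args) throws Exception {
        try (Analyzer analyzer = new WhitespaceAnalyzer();
             TokenStream ts = analyzer.tokenStream("body", "this is the field text")) {
          CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
          ts.reset();                      // required before the first incrementToken()
          while (ts.incrementToken()) {
            System.out.println(term.toString());
          }
          ts.end();
        }                                  // ts is closed first, then analyzer
      }
    }
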
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
index 64fe4c7..a95f095 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
@@ -444,6 +444,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     }
 
     IndexReader r = w.getReader();
+    w.close();
     IndexSearcher s = newSearcher(r);
 
     // Used to match ANY using MultiPhraseQuery:
@@ -561,7 +562,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
       }
     }
 
-    IOUtils.close(w, r, dir, analyzer);
+    IOUtils.close(r, dir, analyzer);
   }
 
   private Set<String> toDocIDs(IndexSearcher s, TopDocs hits) throws IOException {
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
index 3e89275..530a4c3 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
@@ -192,6 +192,7 @@ public class TestFreeTextSuggester extends LuceneTestCase {
       }
     }
     analyzer.close();
+    lfd.close();
   }
 
   // Make sure you can suggest based only on unigram model:
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
index 5ed84e0..4dbccde 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
@@ -50,7 +50,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to "));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] {"go"},
                               new int[] {0},
@@ -69,8 +68,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to school"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] {"go", "school"},
                               new int[] {0, 6},
@@ -89,8 +86,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to a the school"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] { "go", "school" },
                               new int[] {0, 12},
@@ -109,8 +104,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to a the"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] { "go", "the"},
                               new int[] {0, 8},
@@ -129,8 +122,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to a the "));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] { "go"},
                               new int[] {0},
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
index 1dbadc1..9d80476 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
@@ -761,7 +761,7 @@ public class TestSuggestField extends LuceneTestCase {
       }
       assertTrue("at least one of the entries should have the score", matched);
     }
-
+    lineFileDocs.close();
     reader.close();
     iw.close();
   }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
index 4f01cf7..a8f1b7d 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
@@ -550,8 +550,7 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
   private final class ChangeIndices extends Thread {
     @Override
     public void run() {
-      try {
-        final LineFileDocs docs = new LineFileDocs(random());
+      try (final LineFileDocs docs = new LineFileDocs(random())) {
         int numDocs = 0;
         while (System.nanoTime() < endTimeNanos) {
           final int what = random().nextInt(3);