You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by rm...@apache.org on 2015/03/05 17:45:04 UTC
svn commit: r1664404 [3/6] - in /lucene/dev/trunk/lucene:
analysis/common/src/java/org/apache/lucene/analysis/synonym/
analysis/common/src/test/org/apache/lucene/analysis/ar/
analysis/common/src/test/org/apache/lucene/analysis/bg/
analysis/common/src/t...
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestPerFieldAnalyzerWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestPerFieldAnalyzerWrapper.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestPerFieldAnalyzerWrapper.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestPerFieldAnalyzerWrapper.java Thu Mar 5 16:45:02 2015
@@ -15,6 +15,7 @@ import org.apache.lucene.analysis.TokenS
import org.apache.lucene.analysis.core.SimpleAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Rethrow;
/*
@@ -40,9 +41,11 @@ public class TestPerFieldAnalyzerWrapper
Map<String,Analyzer> analyzerPerField =
Collections.<String,Analyzer>singletonMap("special", new SimpleAnalyzer());
+
+ Analyzer defaultAnalyzer = new WhitespaceAnalyzer();
PerFieldAnalyzerWrapper analyzer =
- new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(), analyzerPerField);
+ new PerFieldAnalyzerWrapper(defaultAnalyzer, analyzerPerField);
try (TokenStream tokenStream = analyzer.tokenStream("field", text)) {
CharTermAttribute termAtt = tokenStream.getAttribute(CharTermAttribute.class);
@@ -67,6 +70,10 @@ public class TestPerFieldAnalyzerWrapper
assertFalse(tokenStream.incrementToken());
tokenStream.end();
}
+ // TODO: fix this about PFAW, this is crazy
+ analyzer.close();
+ defaultAnalyzer.close();
+ IOUtils.close(analyzerPerField.values());
}
public void testReuseWrapped() throws Exception {
@@ -124,6 +131,7 @@ public class TestPerFieldAnalyzerWrapper
ts4 = wrapper3.tokenStream("moreSpecial", text);
assertSame(ts3, ts4);
assertSame(ts2, ts3);
+ IOUtils.close(wrapper3, wrapper2, wrapper1, specialAnalyzer, defaultAnalyzer);
}
public void testCharFilters() throws Exception {
@@ -152,5 +160,7 @@ public class TestPerFieldAnalyzerWrapper
new int[] { 0 },
new int[] { 2 }
);
+ p.close();
+ a.close(); // TODO: fix this about PFAW, its a trap
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestRemoveDuplicatesTokenFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestRemoveDuplicatesTokenFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestRemoveDuplicatesTokenFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestRemoveDuplicatesTokenFilter.java Thu Mar 5 16:45:02 2015
@@ -163,6 +163,7 @@ public class TestRemoveDuplicatesTokenFi
};
checkRandomData(random(), analyzer, 200);
+ analyzer.close();
}
}
@@ -175,6 +176,7 @@ public class TestRemoveDuplicatesTokenFi
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianFoldingFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianFoldingFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianFoldingFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianFoldingFilter.java Thu Mar 5 16:45:02 2015
@@ -24,19 +24,27 @@ import org.apache.lucene.analysis.TokenS
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
-import java.io.Reader;
-
public class TestScandinavianFoldingFilter extends BaseTokenStreamTestCase {
-
-
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String field) {
- final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
- final TokenStream stream = new ScandinavianFoldingFilter(tokenizer);
- return new TokenStreamComponents(tokenizer, stream);
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String field) {
+ final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ final TokenStream stream = new ScandinavianFoldingFilter(tokenizer);
+ return new TokenStreamComponents(tokenizer, stream);
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
public void test() throws Exception {
@@ -117,6 +125,7 @@ public class TestScandinavianFoldingFilt
}
};
checkOneTerm(a, "", "");
+ a.close();
}
/** blast some random strings through the analyzer */
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianNormalizationFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianNormalizationFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianNormalizationFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestScandinavianNormalizationFilter.java Thu Mar 5 16:45:02 2015
@@ -24,20 +24,27 @@ import org.apache.lucene.analysis.TokenS
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
-import java.io.Reader;
-
-
public class TestScandinavianNormalizationFilter extends BaseTokenStreamTestCase {
-
-
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String field) {
- final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
- final TokenStream stream = new ScandinavianNormalizationFilter(tokenizer);
- return new TokenStreamComponents(tokenizer, stream);
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String field) {
+ final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ final TokenStream stream = new ScandinavianNormalizationFilter(tokenizer);
+ return new TokenStreamComponents(tokenizer, stream);
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
public void test() throws Exception {
@@ -116,6 +123,7 @@ public class TestScandinavianNormalizati
}
};
checkOneTerm(a, "", "");
+ a.close();
}
/** blast some random strings through the analyzer */
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestTrimFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestTrimFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestTrimFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestTrimFilter.java Thu Mar 5 16:45:02 2015
@@ -18,7 +18,6 @@
package org.apache.lucene.analysis.miscellaneous;
import java.io.IOException;
-import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
@@ -103,6 +102,7 @@ public class TestTrimFilter extends Base
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+ a.close();
}
public void testEmptyTerm() throws IOException {
@@ -114,5 +114,6 @@ public class TestTrimFilter extends Base
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java Thu Mar 5 16:45:02 2015
@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.standa
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.util.CharArraySet;
+import org.apache.lucene.util.IOUtils;
import org.junit.Test;
import java.io.IOException;
@@ -292,6 +293,7 @@ public class TestWordDelimiterFilter ext
new int[] { 4, 4, 11 },
new int[] { 10, 15, 15 },
new int[] { 2, 0, 1 });
+ IOUtils.close(a, a2, a3);
}
/** concat numbers + words + all */
@@ -312,6 +314,7 @@ public class TestWordDelimiterFilter ext
new int[] { 0, 0, 0, 4, 8, 8, 12 },
new int[] { 3, 7, 15, 7, 11, 15, 15 },
new int[] { 1, 0, 0, 1, 1, 0, 1 });
+ a.close();
}
/** concat numbers + words + all + preserve original */
@@ -332,6 +335,7 @@ public class TestWordDelimiterFilter ext
new int[] { 0, 0, 0, 0, 4, 8, 8, 12 },
new int[] { 15, 3, 7, 15, 7, 11, 15, 15 },
new int[] { 1, 0, 0, 0, 1, 1, 0, 1 });
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -356,6 +360,7 @@ public class TestWordDelimiterFilter ext
};
// TODO: properly support positionLengthAttribute
checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 20, false, false);
+ a.close();
}
}
@@ -381,6 +386,7 @@ public class TestWordDelimiterFilter ext
};
// TODO: properly support positionLengthAttribute
checkRandomData(random(), a, 20*RANDOM_MULTIPLIER, 8192, false, false);
+ a.close();
}
}
@@ -404,6 +410,7 @@ public class TestWordDelimiterFilter ext
};
// depending upon options, this thing may or may not preserve the empty term
checkAnalysisConsistency(random, a, random.nextBoolean(), "");
+ a.close();
}
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilterTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilterTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilterTest.java Thu Mar 5 16:45:02 2015
@@ -35,7 +35,6 @@ import org.apache.lucene.analysis.tokena
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.TestUtil;
/**
* Tests {@link EdgeNGramTokenFilter} for correctness.
@@ -183,6 +182,7 @@ public class EdgeNGramTokenFilterTest ex
}
};
checkRandomData(random(), a, 100*RANDOM_MULTIPLIER);
+ a.close();
}
}
@@ -197,6 +197,7 @@ public class EdgeNGramTokenFilterTest ex
}
};
checkAnalysisConsistency(random, a, random.nextBoolean(), "");
+ a.close();
}
public void testGraphs() throws IOException {
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenizerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenizerTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenizerTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/EdgeNGramTokenizerTest.java Thu Mar 5 16:45:02 2015
@@ -113,6 +113,7 @@ public class EdgeNGramTokenizerTest exte
};
checkRandomData(random(), a, 100*RANDOM_MULTIPLIER, 20);
checkRandomData(random(), a, 10*RANDOM_MULTIPLIER, 8192);
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenFilterTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenFilterTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenFilterTest.java Thu Mar 5 16:45:02 2015
@@ -29,7 +29,6 @@ import org.apache.lucene.analysis.miscel
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.Version;
import java.io.IOException;
import java.io.StringReader;
@@ -140,6 +139,7 @@ public class NGramTokenFilterTest extend
new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
new int[] { 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11 },
new int[] { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
+ analyzer.close();
}
/** blast some random strings through the analyzer */
@@ -156,6 +156,7 @@ public class NGramTokenFilterTest extend
}
};
checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 20);
+ a.close();
}
}
@@ -170,6 +171,7 @@ public class NGramTokenFilterTest extend
}
};
checkAnalysisConsistency(random, a, random.nextBoolean(), "");
+ a.close();
}
public void testSupplementaryCharacters() throws IOException {
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenizerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenizerTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenizerTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/NGramTokenizerTest.java Thu Mar 5 16:45:02 2015
@@ -123,6 +123,7 @@ public class NGramTokenizerTest extends
};
checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 20);
checkRandomData(random(), a, 10*RANDOM_MULTIPLIER, 1027);
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/nl/TestDutchAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/nl/TestDutchAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/nl/TestDutchAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/nl/TestDutchAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -117,6 +117,7 @@ public class TestDutchAnalyzer extends B
checkOneTerm(a, "opheffen", "opheff");
checkOneTerm(a, "opheffende", "opheff");
checkOneTerm(a, "opheffing", "opheff");
+ a.close();
}
public void testReusableTokenStream() throws Exception {
@@ -125,6 +126,7 @@ public class TestDutchAnalyzer extends B
checkOneTerm(a, "lichamelijk", "licham");
checkOneTerm(a, "lichamelijke", "licham");
checkOneTerm(a, "lichamelijkheden", "licham");
+ a.close();
}
public void testExclusionTableViaCtor() throws IOException {
@@ -132,10 +134,11 @@ public class TestDutchAnalyzer extends B
set.add("lichamelijk");
DutchAnalyzer a = new DutchAnalyzer( CharArraySet.EMPTY_SET, set);
assertAnalyzesTo(a, "lichamelijk lichamelijke", new String[] { "lichamelijk", "licham" });
-
+ a.close();
+
a = new DutchAnalyzer( CharArraySet.EMPTY_SET, set);
assertAnalyzesTo(a, "lichamelijk lichamelijke", new String[] { "lichamelijk", "licham" });
-
+ a.close();
}
/**
@@ -145,12 +148,14 @@ public class TestDutchAnalyzer extends B
public void testStemOverrides() throws IOException {
DutchAnalyzer a = new DutchAnalyzer( CharArraySet.EMPTY_SET);
checkOneTerm(a, "fiets", "fiets");
+ a.close();
}
public void testEmptyStemDictionary() throws IOException {
DutchAnalyzer a = new DutchAnalyzer( CharArraySet.EMPTY_SET,
CharArraySet.EMPTY_SET, CharArrayMap.<String>emptyMap());
checkOneTerm(a, "fiets", "fiet");
+ a.close();
}
/**
@@ -159,15 +164,20 @@ public class TestDutchAnalyzer extends B
public void testStopwordsCasing() throws IOException {
DutchAnalyzer a = new DutchAnalyzer();
assertAnalyzesTo(a, "Zelf", new String[] { });
+ a.close();
}
private void check(final String input, final String expected) throws Exception {
- checkOneTerm(new DutchAnalyzer(), input, expected);
+ Analyzer analyzer = new DutchAnalyzer();
+ checkOneTerm(analyzer, input, expected);
+ analyzer.close();
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new DutchAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new DutchAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -27,7 +27,7 @@ public class TestNorwegianAnalyzer exten
/** This test fails with NPE when the
* stopwords file is missing in classpath */
public void testResourcesAvailable() {
- new NorwegianAnalyzer();
+ new NorwegianAnalyzer().close();
}
/** test stopwords and stemming */
@@ -38,6 +38,7 @@ public class TestNorwegianAnalyzer exten
checkOneTerm(a, "havnedistrikter", "havnedistrikt");
// stopword
assertAnalyzesTo(a, "det", new String[] {});
+ a.close();
}
/** test use of exclusion set */
@@ -47,10 +48,13 @@ public class TestNorwegianAnalyzer exten
NorwegianAnalyzer.getDefaultStopSet(), exclusionSet);
checkOneTerm(a, "havnedistriktene", "havnedistriktene");
checkOneTerm(a, "havnedistrikter", "havnedistrikt");
+ a.close();
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new NorwegianAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new NorwegianAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianLightStemFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianLightStemFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianLightStemFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianLightStemFilter.java Thu Mar 5 16:45:02 2015
@@ -17,9 +17,7 @@ package org.apache.lucene.analysis.no;
* limitations under the License.
*/
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.Reader;
import java.nio.file.Files;
import java.util.Random;
@@ -36,18 +34,29 @@ import static org.apache.lucene.analysis
import static org.apache.lucene.analysis.no.NorwegianLightStemmer.BOKMAAL;
import static org.apache.lucene.analysis.no.NorwegianLightStemmer.NYNORSK;
-
/**
* Simple tests for {@link NorwegianLightStemFilter}
*/
public class TestNorwegianLightStemFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
- return new TokenStreamComponents(source, new NorwegianLightStemFilter(source, BOKMAAL));
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ return new TokenStreamComponents(source, new NorwegianLightStemFilter(source, BOKMAAL));
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/** Test against a vocabulary file */
public void testVocabulary() throws IOException {
@@ -64,6 +73,7 @@ public class TestNorwegianLightStemFilte
}
};
assertVocabulary(analyzer, Files.newInputStream(getDataPath("nn_light.txt")));
+ analyzer.close();
}
public void testKeyword() throws IOException {
@@ -77,6 +87,7 @@ public class TestNorwegianLightStemFilte
}
};
checkOneTerm(a, "sekretæren", "sekretæren");
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -94,5 +105,6 @@ public class TestNorwegianLightStemFilte
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianMinimalStemFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianMinimalStemFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianMinimalStemFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/no/TestNorwegianMinimalStemFilter.java Thu Mar 5 16:45:02 2015
@@ -17,9 +17,7 @@ package org.apache.lucene.analysis.no;
* limitations under the License.
*/
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.Reader;
import java.nio.file.Files;
import java.util.Random;
@@ -40,13 +38,25 @@ import static org.apache.lucene.analysis
* Simple tests for {@link NorwegianMinimalStemFilter}
*/
public class TestNorwegianMinimalStemFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
- return new TokenStreamComponents(source, new NorwegianMinimalStemFilter(source, BOKMAAL));
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ return new TokenStreamComponents(source, new NorwegianMinimalStemFilter(source, BOKMAAL));
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/** Test against a Bokmål vocabulary file */
public void testVocabulary() throws IOException {
@@ -63,6 +73,7 @@ public class TestNorwegianMinimalStemFil
}
};
assertVocabulary(analyzer, Files.newInputStream(getDataPath("nn_minimal.txt")));
+ analyzer.close();
}
public void testKeyword() throws IOException {
@@ -76,6 +87,7 @@ public class TestNorwegianMinimalStemFil
}
};
checkOneTerm(a, "sekretæren", "sekretæren");
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -93,5 +105,6 @@ public class TestNorwegianMinimalStemFil
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestPathHierarchyTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestPathHierarchyTokenizer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestPathHierarchyTokenizer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestPathHierarchyTokenizer.java Thu Mar 5 16:45:02 2015
@@ -227,6 +227,7 @@ public class TestPathHierarchyTokenizer
};
// TODO: properly support positionLengthAttribute
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+ a.close();
}
/** blast some random large strings through the analyzer */
@@ -241,5 +242,6 @@ public class TestPathHierarchyTokenizer
};
// TODO: properly support positionLengthAttribute
checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 1027, false, false);
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestReversePathHierarchyTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestReversePathHierarchyTokenizer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestReversePathHierarchyTokenizer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/path/TestReversePathHierarchyTokenizer.java Thu Mar 5 16:45:02 2015
@@ -17,14 +17,12 @@ package org.apache.lucene.analysis.path;
* limitations under the License.
*/
-import java.io.Reader;
import java.io.StringReader;
import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.Analyzer.TokenStreamComponents;
import static org.apache.lucene.analysis.path.ReversePathHierarchyTokenizer.DEFAULT_DELIMITER;
import static org.apache.lucene.analysis.path.ReversePathHierarchyTokenizer.DEFAULT_SKIP;
@@ -187,6 +185,7 @@ public class TestReversePathHierarchyTok
};
// TODO: properly support positionLengthAttribute
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+ a.close();
}
/** blast some random large strings through the analyzer */
@@ -201,5 +200,6 @@ public class TestReversePathHierarchyTok
};
// TODO: properly support positionLengthAttribute
checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 1027, false, false);
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternCaptureGroupTokenFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternCaptureGroupTokenFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternCaptureGroupTokenFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternCaptureGroupTokenFilter.java Thu Mar 5 16:45:02 2015
@@ -16,7 +16,7 @@ package org.apache.lucene.analysis.patte
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import java.io.Reader;
+
import java.io.StringReader;
import java.util.regex.Pattern;
@@ -606,6 +606,7 @@ public class TestPatternCaptureGroupToke
};
checkRandomData(random(), a, 1000 * RANDOM_MULTIPLIER);
+ a.close();
}
private void testPatterns(String input, String[] regexes, String[] tokens,
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceCharFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceCharFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceCharFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceCharFilter.java Thu Mar 5 16:45:02 2015
@@ -324,6 +324,7 @@ public class TestPatternReplaceCharFilte
/* ASCII only input?: */
final boolean asciiOnly = true;
checkRandomData(random, a, 250 * RANDOM_MULTIPLIER, maxInputLength, asciiOnly);
+ a.close();
}
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternReplaceFilter.java Thu Mar 5 16:45:02 2015
@@ -27,8 +27,6 @@ import org.apache.lucene.analysis.core.K
import java.io.IOException;
import java.util.regex.Pattern;
-/**
- */
public class TestPatternReplaceFilter extends BaseTokenStreamTestCase {
public void testReplaceAll() throws Exception {
@@ -92,6 +90,7 @@ public class TestPatternReplaceFilter ex
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+ a.close();
Analyzer b = new Analyzer() {
@Override
@@ -102,6 +101,7 @@ public class TestPatternReplaceFilter ex
}
};
checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+ b.close();
}
public void testEmptyTerm() throws IOException {
@@ -113,6 +113,7 @@ public class TestPatternReplaceFilter ex
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternTokenizer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternTokenizer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestPatternTokenizer.java Thu Mar 5 16:45:02 2015
@@ -18,7 +18,6 @@
package org.apache.lucene.analysis.pattern;
import java.io.IOException;
-import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
@@ -29,10 +28,8 @@ import org.apache.lucene.analysis.BaseTo
import org.apache.lucene.analysis.CharFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.Analyzer.TokenStreamComponents;
import org.apache.lucene.analysis.charfilter.MappingCharFilter;
import org.apache.lucene.analysis.charfilter.NormalizeCharMap;
-import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
public class TestPatternTokenizer extends BaseTokenStreamTestCase
@@ -137,6 +134,7 @@ public class TestPatternTokenizer extend
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+ a.close();
Analyzer b = new Analyzer() {
@Override
@@ -146,5 +144,6 @@ public class TestPatternTokenizer extend
}
};
checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+ b.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -27,7 +27,7 @@ public class TestPortugueseAnalyzer exte
/** This test fails with NPE when the
* stopwords file is missing in classpath */
public void testResourcesAvailable() {
- new PortugueseAnalyzer();
+ new PortugueseAnalyzer().close();
}
/** test stopwords and stemming */
@@ -38,6 +38,7 @@ public class TestPortugueseAnalyzer exte
checkOneTerm(a, "quilométricos", "quilometric");
// stopword
assertAnalyzesTo(a, "não", new String[] {});
+ a.close();
}
/** test use of exclusion set */
@@ -47,10 +48,13 @@ public class TestPortugueseAnalyzer exte
PortugueseAnalyzer.getDefaultStopSet(), exclusionSet);
checkOneTerm(a, "quilométricas", "quilométricas");
checkOneTerm(a, "quilométricos", "quilometric");
+ a.close();
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new PortugueseAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new PortugueseAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseLightStemFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseLightStemFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseLightStemFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseLightStemFilter.java Thu Mar 5 16:45:02 2015
@@ -34,13 +34,25 @@ import static org.apache.lucene.analysis
* Simple tests for {@link PortugueseLightStemFilter}
*/
public class TestPortugueseLightStemFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer source = new MockTokenizer(MockTokenizer.SIMPLE, true);
- return new TokenStreamComponents(source, new PortugueseLightStemFilter(source));
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer source = new MockTokenizer(MockTokenizer.SIMPLE, true);
+ return new TokenStreamComponents(source, new PortugueseLightStemFilter(source));
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/**
* Test the example from the paper "Assessing the impact of stemming accuracy
@@ -102,6 +114,7 @@ public class TestPortugueseLightStemFilt
}
};
checkOneTerm(a, "quilométricas", "quilométricas");
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -118,5 +131,6 @@ public class TestPortugueseLightStemFilt
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseMinimalStemFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseMinimalStemFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseMinimalStemFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseMinimalStemFilter.java Thu Mar 5 16:45:02 2015
@@ -34,13 +34,25 @@ import static org.apache.lucene.analysis
* Simple tests for {@link PortugueseMinimalStemFilter}
*/
public class TestPortugueseMinimalStemFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer source = new MockTokenizer(MockTokenizer.SIMPLE, true);
- return new TokenStreamComponents(source, new PortugueseMinimalStemFilter(source));
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer source = new MockTokenizer(MockTokenizer.SIMPLE, true);
+ return new TokenStreamComponents(source, new PortugueseMinimalStemFilter(source));
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/**
* Test the example from the paper "Assessing the impact of stemming accuracy
@@ -76,6 +88,7 @@ public class TestPortugueseMinimalStemFi
}
};
checkOneTerm(a, "quilométricas", "quilométricas");
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -92,5 +105,6 @@ public class TestPortugueseMinimalStemFi
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseStemFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseStemFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseStemFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/pt/TestPortugueseStemFilter.java Thu Mar 5 16:45:02 2015
@@ -34,13 +34,25 @@ import org.apache.lucene.analysis.util.C
* Simple tests for {@link PortugueseStemFilter}
*/
public class TestPortugueseStemFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer source = new MockTokenizer(MockTokenizer.SIMPLE, true);
- return new TokenStreamComponents(source, new PortugueseStemFilter(source));
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer source = new MockTokenizer(MockTokenizer.SIMPLE, true);
+ return new TokenStreamComponents(source, new PortugueseStemFilter(source));
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/**
* Test the example from the paper "Assessing the impact of stemming accuracy
@@ -76,6 +88,7 @@ public class TestPortugueseStemFilter ex
}
};
checkOneTerm(a, "quilométricas", "quilométricas");
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -92,5 +105,6 @@ public class TestPortugueseStemFilter ex
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java Thu Mar 5 16:45:02 2015
@@ -58,6 +58,7 @@ public class QueryAutoStopWordAnalyzerTe
@Override
public void tearDown() throws Exception {
+ appAnalyzer.close();
reader.close();
super.tearDown();
}
@@ -70,12 +71,14 @@ public class QueryAutoStopWordAnalyzerTe
protectedTokenStream = protectedAnalyzer.tokenStream("repetitiveField", "boring");
assertTokenStreamContents(protectedTokenStream, new String[]{"boring"});
+ protectedAnalyzer.close();
}
public void testDefaultStopwordsAllFields() throws Exception {
protectedAnalyzer = new QueryAutoStopWordAnalyzer( appAnalyzer, reader);
TokenStream protectedTokenStream = protectedAnalyzer.tokenStream("repetitiveField", "boring");
assertTokenStreamContents(protectedTokenStream, new String[0]); // Default stop word filtering will remove boring
+ protectedAnalyzer.close();
}
public void testStopwordsAllFieldsMaxPercentDocs() throws Exception {
@@ -88,11 +91,13 @@ public class QueryAutoStopWordAnalyzerTe
protectedTokenStream = protectedAnalyzer.tokenStream("repetitiveField", "vaguelyboring");
// A filter on terms in > half of docs should not remove vaguelyBoring
assertTokenStreamContents(protectedTokenStream, new String[]{"vaguelyboring"});
+ protectedAnalyzer.close();
protectedAnalyzer = new QueryAutoStopWordAnalyzer( appAnalyzer, reader, 1f / 4f);
protectedTokenStream = protectedAnalyzer.tokenStream("repetitiveField", "vaguelyboring");
// A filter on terms in > quarter of docs should remove vaguelyBoring
assertTokenStreamContents(protectedTokenStream, new String[0]);
+ protectedAnalyzer.close();
}
public void testStopwordsPerFieldMaxPercentDocs() throws Exception {
@@ -100,21 +105,25 @@ public class QueryAutoStopWordAnalyzerTe
TokenStream protectedTokenStream = protectedAnalyzer.tokenStream("repetitiveField", "boring");
// A filter on one Field should not affect queries on another
assertTokenStreamContents(protectedTokenStream, new String[]{"boring"});
+ protectedAnalyzer.close();
protectedAnalyzer = new QueryAutoStopWordAnalyzer( appAnalyzer, reader, Arrays.asList("variedField", "repetitiveField"), 1f / 2f);
protectedTokenStream = protectedAnalyzer.tokenStream("repetitiveField", "boring");
// A filter on the right Field should affect queries on it
assertTokenStreamContents(protectedTokenStream, new String[0]);
+ protectedAnalyzer.close();
}
public void testStopwordsPerFieldMaxDocFreq() throws Exception {
protectedAnalyzer = new QueryAutoStopWordAnalyzer( appAnalyzer, reader, Arrays.asList("repetitiveField"), 10);
int numStopWords = protectedAnalyzer.getStopWords("repetitiveField").length;
assertTrue("Should have identified stop words", numStopWords > 0);
+ protectedAnalyzer.close();
protectedAnalyzer = new QueryAutoStopWordAnalyzer( appAnalyzer, reader, Arrays.asList("repetitiveField", "variedField"), 10);
int numNewStopWords = protectedAnalyzer.getStopWords("repetitiveField").length + protectedAnalyzer.getStopWords("variedField").length;
assertTrue("Should have identified more stop words", numNewStopWords > numStopWords);
+ protectedAnalyzer.close();
}
public void testNoFieldNamePollution() throws Exception {
@@ -127,6 +136,7 @@ public class QueryAutoStopWordAnalyzerTe
protectedTokenStream = protectedAnalyzer.tokenStream("variedField", "boring");
// Filter should not prevent stopwords in one field being used in another
assertTokenStreamContents(protectedTokenStream, new String[]{"boring"});
+ protectedAnalyzer.close();
}
public void testTokenStream() throws Exception {
@@ -134,5 +144,6 @@ public class QueryAutoStopWordAnalyzerTe
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false), reader, 10);
TokenStream ts = a.tokenStream("repetitiveField", "this boring");
assertTokenStreamContents(ts, new String[] { "this" });
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/reverse/TestReverseStringFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/reverse/TestReverseStringFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/reverse/TestReverseStringFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/reverse/TestReverseStringFilter.java Thu Mar 5 16:45:02 2015
@@ -101,6 +101,7 @@ public class TestReverseStringFilter ext
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+ a.close();
}
public void testEmptyTerm() throws IOException {
@@ -112,5 +113,6 @@ public class TestReverseStringFilter ext
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ro/TestRomanianAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ro/TestRomanianAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ro/TestRomanianAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ro/TestRomanianAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -27,7 +27,7 @@ public class TestRomanianAnalyzer extend
/** This test fails with NPE when the
* stopwords file is missing in classpath */
public void testResourcesAvailable() {
- new RomanianAnalyzer();
+ new RomanianAnalyzer().close();
}
/** test stopwords and stemming */
@@ -38,6 +38,7 @@ public class TestRomanianAnalyzer extend
checkOneTerm(a, "absenţi", "absenţ");
// stopword
assertAnalyzesTo(a, "îl", new String[] {});
+ a.close();
}
/** test use of exclusion set */
@@ -47,10 +48,13 @@ public class TestRomanianAnalyzer extend
RomanianAnalyzer.getDefaultStopSet(), exclusionSet);
checkOneTerm(a, "absenţa", "absenţa");
checkOneTerm(a, "absenţi", "absenţ");
+ a.close();
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new RomanianAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new RomanianAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -28,34 +28,38 @@ import org.apache.lucene.analysis.util.C
*/
public class TestRussianAnalyzer extends BaseTokenStreamTestCase {
-
- /** Check that RussianAnalyzer doesnt discard any numbers */
- public void testDigitsInRussianCharset() throws IOException
- {
- RussianAnalyzer ra = new RussianAnalyzer();
- assertAnalyzesTo(ra, "text 1000", new String[] { "text", "1000" });
- }
-
- public void testReusableTokenStream() throws Exception {
- Analyzer a = new RussianAnalyzer();
- assertAnalyzesTo(a, "Вместе с тем о силе электромагнитной энергии имели представление еще",
- new String[] { "вмест", "сил", "электромагнитн", "энерг", "имел", "представлен" });
- assertAnalyzesTo(a, "Но знание это хранилось в тайне",
- new String[] { "знан", "эт", "хран", "тайн" });
- }
-
-
- public void testWithStemExclusionSet() throws Exception {
- CharArraySet set = new CharArraySet( 1, true);
- set.add("представление");
- Analyzer a = new RussianAnalyzer( RussianAnalyzer.getDefaultStopSet() , set);
- assertAnalyzesTo(a, "Вместе с тем о силе электромагнитной энергии имели представление еще",
- new String[] { "вмест", "сил", "электромагнитн", "энерг", "имел", "представление" });
-
- }
-
- /** blast some random strings through the analyzer */
- public void testRandomStrings() throws Exception {
- checkRandomData(random(), new RussianAnalyzer(), 1000*RANDOM_MULTIPLIER);
- }
+
+ /** Check that RussianAnalyzer doesnt discard any numbers */
+ public void testDigitsInRussianCharset() throws IOException
+ {
+ RussianAnalyzer ra = new RussianAnalyzer();
+ assertAnalyzesTo(ra, "text 1000", new String[] { "text", "1000" });
+ ra.close();
+ }
+
+ public void testReusableTokenStream() throws Exception {
+ Analyzer a = new RussianAnalyzer();
+ assertAnalyzesTo(a, "Вместе с тем о силе электромагнитной энергии имели представление еще",
+ new String[] { "вмест", "сил", "электромагнитн", "энерг", "имел", "представлен" });
+ assertAnalyzesTo(a, "Но знание это хранилось в тайне",
+ new String[] { "знан", "эт", "хран", "тайн" });
+ a.close();
+ }
+
+
+ public void testWithStemExclusionSet() throws Exception {
+ CharArraySet set = new CharArraySet( 1, true);
+ set.add("представление");
+ Analyzer a = new RussianAnalyzer( RussianAnalyzer.getDefaultStopSet() , set);
+ assertAnalyzesTo(a, "Вместе с тем о силе электромагнитной энергии имели представление еще",
+ new String[] { "вмест", "сил", "электромагнитн", "энерг", "имел", "представление" });
+ a.close();
+ }
+
+ /** blast some random strings through the analyzer */
+ public void testRandomStrings() throws Exception {
+ Analyzer analyzer = new RussianAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
+ }
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianLightStemFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianLightStemFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianLightStemFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/ru/TestRussianLightStemFilter.java Thu Mar 5 16:45:02 2015
@@ -18,7 +18,6 @@ package org.apache.lucene.analysis.ru;
*/
import java.io.IOException;
-import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
@@ -35,13 +34,25 @@ import static org.apache.lucene.analysis
* Simple tests for {@link RussianLightStemFilter}
*/
public class TestRussianLightStemFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
- return new TokenStreamComponents(source, new RussianLightStemFilter(source));
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ return new TokenStreamComponents(source, new RussianLightStemFilter(source));
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/** Test against a vocabulary from the reference impl */
public void testVocabulary() throws IOException {
@@ -59,6 +70,7 @@ public class TestRussianLightStemFilter
}
};
checkOneTerm(a, "энергии", "энергии");
+ a.close();
}
/** blast some random strings through the analyzer */
@@ -75,5 +87,6 @@ public class TestRussianLightStemFilter
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java Thu Mar 5 16:45:02 2015
@@ -83,6 +83,7 @@ public class ShingleAnalyzerWrapperTest
public void tearDown() throws Exception {
reader.close();
directory.close();
+ analyzer.close();
super.tearDown();
}
@@ -156,6 +157,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 10, 10, 13 },
new int[] { 6, 9, 9, 12, 12, 18, 18 },
new int[] { 1, 0, 1, 0, 1, 0, 1 });
+ a.close();
}
public void testNonDefaultMinShingleSize() throws Exception {
@@ -171,6 +173,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 0, 7, 7, 7, 14, 14, 14, 19, 19, 28, 33 },
new int[] { 6, 18, 27, 13, 27, 32, 18, 32, 41, 27, 41, 32, 41 },
new int[] { 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1 });
+ analyzer.close();
analyzer = new ShingleAnalyzerWrapper(
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false), 3, 4,
@@ -183,6 +186,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 14, 14, 19 },
new int[] { 18, 27, 27, 32, 32, 41, 41 },
new int[] { 1, 0, 1, 0, 1, 0, 1 });
+ analyzer.close();
}
public void testNonDefaultMinAndSameMaxShingleSize() throws Exception {
@@ -198,6 +202,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 14, 14, 19, 19, 28, 33 },
new int[] { 6, 18, 13, 27, 18, 32, 27, 41, 32, 41 },
new int[] { 1, 0, 1, 0, 1, 0, 1, 0, 1, 1 });
+ analyzer.close();
analyzer = new ShingleAnalyzerWrapper(
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false), 3, 3,
@@ -210,6 +215,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 7, 14, 19 },
new int[] { 18, 27, 32, 41 },
new int[] { 1, 1, 1, 1 });
+ analyzer.close();
}
public void testNoTokenSeparator() throws Exception {
@@ -227,6 +233,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 14, 14, 19 },
new int[] { 6, 13, 13, 18, 18, 27, 27 },
new int[] { 1, 0, 1, 0, 1, 0, 1 });
+ analyzer.close();
analyzer = new ShingleAnalyzerWrapper(
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false),
@@ -241,6 +248,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 7, 14 },
new int[] { 13, 18, 27 },
new int[] { 1, 1, 1 });
+ analyzer.close();
}
public void testNullTokenSeparator() throws Exception {
@@ -258,6 +266,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 14, 14, 19 },
new int[] { 6, 13, 13, 18, 18, 27, 27 },
new int[] { 1, 0, 1, 0, 1, 0, 1 });
+ analyzer.close();
analyzer = new ShingleAnalyzerWrapper(
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false),
@@ -272,6 +281,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 7, 14 },
new int[] { 13, 18, 27 },
new int[] { 1, 1, 1 });
+ analyzer.close();
}
public void testAltTokenSeparator() throws Exception {
@@ -289,6 +299,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 14, 14, 19 },
new int[] { 6, 13, 13, 18, 18, 27, 27 },
new int[] { 1, 0, 1, 0, 1, 0, 1 });
+ analyzer.close();
analyzer = new ShingleAnalyzerWrapper(
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false),
@@ -303,6 +314,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 7, 14 },
new int[] { 13, 18, 27 },
new int[] { 1, 1, 1 });
+ analyzer.close();
}
public void testAltFillerToken() throws Exception {
@@ -329,7 +341,17 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 0, 7, 7, 19, 19 },
new int[] { 6, 13, 13, 19, 27, 27 },
new int[] { 1, 0, 1, 0, 1, 1 });
+ analyzer.close();
+ delegate = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ CharArraySet stopSet = StopFilter.makeStopSet("into");
+ Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ TokenFilter filter = new StopFilter(tokenizer, stopSet);
+ return new TokenStreamComponents(tokenizer, filter);
+ }
+ };
analyzer = new ShingleAnalyzerWrapper(
delegate,
ShingleFilter.DEFAULT_MIN_SHINGLE_SIZE,
@@ -341,7 +363,17 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 7, 19 },
new int[] { 13, 19, 27 },
new int[] { 1, 1, 1 });
+ analyzer.close();
+ delegate = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ CharArraySet stopSet = StopFilter.makeStopSet("into");
+ Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ TokenFilter filter = new StopFilter(tokenizer, stopSet);
+ return new TokenStreamComponents(tokenizer, filter);
+ }
+ };
analyzer = new ShingleAnalyzerWrapper(
delegate,
ShingleFilter.DEFAULT_MIN_SHINGLE_SIZE,
@@ -353,6 +385,7 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0, 7, 19 },
new int[] { 13, 19, 27 },
new int[] { 1, 1, 1 });
+ analyzer.close();
}
public void testOutputUnigramsIfNoShinglesSingleToken() throws Exception {
@@ -367,5 +400,6 @@ public class ShingleAnalyzerWrapperTest
new int[] { 0 },
new int[] { 6 },
new int[] { 1 });
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleFilterTest.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleFilterTest.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleFilterTest.java Thu Mar 5 16:45:02 2015
@@ -1113,6 +1113,7 @@ public class ShingleFilterTest extends B
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+ a.close();
}
/** blast some random large strings through the analyzer */
@@ -1126,6 +1127,7 @@ public class ShingleFilterTest extends B
}
};
checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
+ a.close();
}
public void testEmptyTerm() throws IOException {
@@ -1137,6 +1139,7 @@ public class ShingleFilterTest extends B
}
};
checkOneTerm(a, "", "");
+ a.close();
}
public void testTrailingHole1() throws IOException {
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java Thu Mar 5 16:45:02 2015
@@ -123,6 +123,7 @@ public class TestTeeSinkTokenFilter exte
assertEquals(DocIdSetIterator.NO_MORE_DOCS, positions.nextDoc());
r.close();
dir.close();
+ analyzer.close();
}
public void testGeneral() throws IOException {
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java Thu Mar 5 16:45:02 2015
@@ -48,6 +48,7 @@ public class TestSnowball extends BaseTo
assertAnalyzesTo(a, "he abhorred accents",
new String[]{"he", "abhor", "accent"});
+ a.close();
}
public void testFilterTokens() throws Exception {
@@ -113,6 +114,7 @@ public class TestSnowball extends BaseTo
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
@@ -131,5 +133,6 @@ public class TestSnowball extends BaseTo
}
};
checkRandomData(random(), a, 100*RANDOM_MULTIPLIER);
+ a.close();
}
}
\ No newline at end of file
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowballVocab.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowballVocab.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowballVocab.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowballVocab.java Thu Mar 5 16:45:02 2015
@@ -18,7 +18,6 @@ package org.apache.lucene.analysis.snowb
*/
import java.io.IOException;
-import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
@@ -80,5 +79,6 @@ public class TestSnowballVocab extends L
assertVocabulary(a, getDataPath("TestSnowballVocabData.zip"),
dataDirectory + "/voc.txt", dataDirectory + "/output.txt");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sr/TestSerbianNormalizationFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sr/TestSerbianNormalizationFilter.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sr/TestSerbianNormalizationFilter.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sr/TestSerbianNormalizationFilter.java Thu Mar 5 16:45:02 2015
@@ -30,14 +30,26 @@ import org.apache.lucene.analysis.core.K
* Tests {@link SerbianNormalizationFilter}
*/
public class TestSerbianNormalizationFilter extends BaseTokenStreamTestCase {
- private Analyzer analyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String field) {
- final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
- final TokenStream stream = new SerbianNormalizationFilter(tokenizer);
- return new TokenStreamComponents(tokenizer, stream);
- }
- };
+ private Analyzer analyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+ final TokenStream stream = new SerbianNormalizationFilter(tokenizer);
+ return new TokenStreamComponents(tokenizer, stream);
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ analyzer.close();
+ super.tearDown();
+ }
/**
* Tests Cyrillic text.
@@ -67,5 +79,6 @@ public class TestSerbianNormalizationFil
}
};
checkOneTerm(a, "", "");
+ a.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -35,17 +35,29 @@ import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.util.Arrays;
-import java.util.Random;
/** tests for classicanalyzer */
public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
- private Analyzer a = new ClassicAnalyzer();
+ private Analyzer a;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ a = new ClassicAnalyzer();
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ a.close();
+ super.tearDown();
+ }
public void testMaxTermLength() throws Exception {
ClassicAnalyzer sa = new ClassicAnalyzer();
sa.setMaxTokenLength(5);
assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "xy", "z"});
+ sa.close();
}
public void testMaxTermLength2() throws Exception {
@@ -54,6 +66,7 @@ public class TestClassicAnalyzer extends
sa.setMaxTokenLength(5);
assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "xy", "z"}, new int[]{1, 1, 2, 1});
+ sa.close();
}
public void testMaxTermLength3() throws Exception {
@@ -115,6 +128,7 @@ public class TestClassicAnalyzer extends
try {
ClassicAnalyzer analyzer = new ClassicAnalyzer();
assertAnalyzesTo(analyzer, "www.nutch.org.", new String[]{ "www.nutch.org" }, new String[] { "<HOST>" });
+ analyzer.close();
} catch (NullPointerException e) {
fail("Should not throw an NPE and it did");
}
@@ -137,8 +151,10 @@ public class TestClassicAnalyzer extends
// 2.4 should not show the bug. But, alas, it's also obsolete,
// so we check latest released (Robert's gonna break this on 4.0 soon :) )
+ a2.close();
a2 = new ClassicAnalyzer();
assertAnalyzesTo(a2, "www.nutch.org.", new String[]{ "www.nutch.org" }, new String[] { "<HOST>" });
+ a2.close();
}
public void testEMailAddresses() throws Exception {
@@ -246,6 +262,7 @@ public class TestClassicAnalyzer extends
public void testJava14BWCompatibility() throws Exception {
ClassicAnalyzer sa = new ClassicAnalyzer();
assertAnalyzesTo(sa, "test\u02C6test", new String[] { "test", "test" });
+ sa.close();
}
/**
@@ -253,7 +270,8 @@ public class TestClassicAnalyzer extends
*/
public void testWickedLongTerm() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new ClassicAnalyzer()));
+ Analyzer analyzer = new ClassicAnalyzer();
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
char[] chars = new char[IndexWriter.MAX_TERM_LENGTH];
Arrays.fill(chars, 'x');
@@ -309,16 +327,21 @@ public class TestClassicAnalyzer extends
reader.close();
dir.close();
+ analyzer.close();
+ sa.close();
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new ClassicAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new ClassicAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
/** blast some random large strings through the analyzer */
public void testRandomHugeStrings() throws Exception {
- Random random = random();
- checkRandomData(random, new ClassicAnalyzer(), 100*RANDOM_MULTIPLIER, 8192);
+ Analyzer analyzer = new ClassicAnalyzer();
+ checkRandomData(random(), analyzer, 100*RANDOM_MULTIPLIER, 8192);
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -114,14 +114,25 @@ public class TestStandardAnalyzer extend
BaseTokenStreamTestCase.assertTokenStreamContents(tokenizer, new String[] { "testing", "1234" });
}
- private Analyzer a = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
-
- Tokenizer tokenizer = new StandardTokenizer(newAttributeFactory());
- return new TokenStreamComponents(tokenizer);
- }
- };
+ private Analyzer a;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ a = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer tokenizer = new StandardTokenizer(newAttributeFactory());
+ return new TokenStreamComponents(tokenizer);
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ a.close();
+ super.tearDown();
+ }
public void testArmenian() throws Exception {
BaseTokenStreamTestCase.assertAnalyzesTo(a, "ÕÕ«ÖÕ«ÕºÕ¥Õ¤Õ«Õ¡ÕµÕ« 13 Õ´Õ«Õ¬Õ«Õ¸Õ¶ Õ°Õ¸Õ¤Õ¾Õ¡Õ®Õ¶Õ¥ÖÕ¨ (4,600` Õ°Õ¡ÕµÕ¥ÖÕ¥Õ¶ Õ¾Õ«ÖÕ«ÕºÕ¥Õ¤Õ«Õ¡ÕµÕ¸ÖÕ´) Õ£ÖÕ¾Õ¥Õ¬ Õ¥Õ¶ Õ¯Õ¡Õ´Õ¡Õ¾Õ¸ÖÕ¶Õ¥ÖÕ« Õ¯Õ¸Õ²Õ´Õ«Ö Õ¸Ö Õ°Õ¡Õ´Õ¡ÖÕµÕ¡ Õ¢Õ¸Õ¬Õ¸Ö Õ°Õ¸Õ¤Õ¾Õ¡Õ®Õ¶Õ¥ÖÕ¨ Õ¯Õ¡ÖÕ¸Õ² Õ§ ÕÕ´Õ¢Õ¡Õ£ÖÕ¥Õ¬ ÖÕ¡Õ¶Õ¯Õ¡Ö Õ´Õ¡ÖÕ¤ Õ¸Õ¾ Õ¯Õ¡ÖÕ¸Õ² Õ§ Õ¢Õ¡ÖÕ¥Õ¬ ÕÕ«ÖÕ«ÕºÕ¥Õ¤Õ«Õ¡ÕµÕ« Õ¯Õ¡ÕµÖÕ¨Ö",
@@ -350,27 +361,30 @@ public class TestStandardAnalyzer extend
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new StandardAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new StandardAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
/** blast some random large strings through the analyzer */
public void testRandomHugeStrings() throws Exception {
- Random random = random();
- checkRandomData(random, new StandardAnalyzer(), 100*RANDOM_MULTIPLIER, 8192);
+ Analyzer analyzer = new StandardAnalyzer();
+ checkRandomData(random(), analyzer, 100*RANDOM_MULTIPLIER, 8192);
+ analyzer.close();
}
// Adds random graph after:
public void testRandomHugeStringsGraphAfter() throws Exception {
Random random = random();
- checkRandomData(random,
- new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer tokenizer = new StandardTokenizer(newAttributeFactory());
- TokenStream tokenStream = new MockGraphTokenFilter(random(), tokenizer);
- return new TokenStreamComponents(tokenizer, tokenStream);
- }
- },
- 100*RANDOM_MULTIPLIER, 8192);
+ Analyzer analyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer tokenizer = new StandardTokenizer(newAttributeFactory());
+ TokenStream tokenStream = new MockGraphTokenFilter(random(), tokenizer);
+ return new TokenStreamComponents(tokenizer, tokenStream);
+ }
+ };
+ checkRandomData(random, analyzer, 100*RANDOM_MULTIPLIER, 8192);
+ analyzer.close();
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -26,7 +26,19 @@ import java.util.Arrays;
public class TestUAX29URLEmailAnalyzer extends BaseTokenStreamTestCase {
- private Analyzer a = new UAX29URLEmailAnalyzer();
+ private Analyzer a;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ a = new UAX29URLEmailAnalyzer();
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ a.close();
+ super.tearDown();
+ }
public void testHugeDoc() throws IOException {
StringBuilder sb = new StringBuilder();
@@ -343,6 +355,6 @@ public class TestUAX29URLEmailAnalyzer e
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new UAX29URLEmailAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
}
}
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java Thu Mar 5 16:45:02 2015
@@ -8,6 +8,7 @@ import org.apache.lucene.analysis.Tokeni
import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;
import org.apache.lucene.analysis.standard.WordBreakTestUnicode_6_3_0;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import java.io.BufferedReader;
@@ -88,15 +89,42 @@ public class TestUAX29URLEmailTokenizer
BaseTokenStreamTestCase.assertTokenStreamContents(tokenizer, new String[] { "testing", "1234" });
}
- private Analyzer a = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
-
- Tokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
- return new TokenStreamComponents(tokenizer);
- }
- };
-
+ private Analyzer a, urlAnalyzer, emailAnalyzer;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ a = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
+ return new TokenStreamComponents(tokenizer);
+ }
+ };
+ urlAnalyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
+ tokenizer.setMaxTokenLength(Integer.MAX_VALUE); // Tokenize arbitrary length URLs
+ TokenFilter filter = new URLFilter(tokenizer);
+ return new TokenStreamComponents(tokenizer, filter);
+ }
+ };
+ emailAnalyzer = new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
+ TokenFilter filter = new EmailFilter(tokenizer);
+ return new TokenStreamComponents(tokenizer, filter);
+ }
+ };
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ IOUtils.close(a, urlAnalyzer, emailAnalyzer);
+ super.tearDown();
+ }
/** Passes through tokens with type "<URL>" and blocks all other types. */
private class URLFilter extends TokenFilter {
@@ -134,27 +162,7 @@ public class TestUAX29URLEmailTokenizer
}
return isTokenAvailable;
}
- }
-
- private Analyzer urlAnalyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
- tokenizer.setMaxTokenLength(Integer.MAX_VALUE); // Tokenize arbitrary length URLs
- TokenFilter filter = new URLFilter(tokenizer);
- return new TokenStreamComponents(tokenizer, filter);
- }
- };
-
- private Analyzer emailAnalyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
- TokenFilter filter = new EmailFilter(tokenizer);
- return new TokenStreamComponents(tokenizer, filter);
- }
- };
-
+ }
public void testArmenian() throws Exception {
BaseTokenStreamTestCase.assertAnalyzesTo(a, "ÕÕ«ÖÕ«ÕºÕ¥Õ¤Õ«Õ¡ÕµÕ« 13 Õ´Õ«Õ¬Õ«Õ¸Õ¶ Õ°Õ¸Õ¤Õ¾Õ¡Õ®Õ¶Õ¥ÖÕ¨ (4,600` Õ°Õ¡ÕµÕ¥ÖÕ¥Õ¶ Õ¾Õ«ÖÕ«ÕºÕ¥Õ¤Õ«Õ¡ÕµÕ¸ÖÕ´) Õ£ÖÕ¾Õ¥Õ¬ Õ¥Õ¶ Õ¯Õ¡Õ´Õ¡Õ¾Õ¸ÖÕ¶Õ¥ÖÕ« Õ¯Õ¸Õ²Õ´Õ«Ö Õ¸Ö Õ°Õ¡Õ´Õ¡ÖÕµÕ¡ Õ¢Õ¸Õ¬Õ¸Ö Õ°Õ¸Õ¤Õ¾Õ¡Õ®Õ¶Õ¥ÖÕ¨ Õ¯Õ¡ÖÕ¸Õ² Õ§ ÕÕ´Õ¢Õ¡Õ£ÖÕ¥Õ¬ ÖÕ¡Õ¶Õ¯Õ¡Ö Õ´Õ¡ÖÕ¤ Õ¸Õ¾ Õ¯Õ¡ÖÕ¸Õ² Õ§ Õ¢Õ¡ÖÕ¥Õ¬ ÕÕ«ÖÕ«ÕºÕ¥Õ¤Õ«Õ¡ÕµÕ« Õ¯Õ¡ÕµÖÕ¨Ö",
Modified: lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sv/TestSwedishAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sv/TestSwedishAnalyzer.java?rev=1664404&r1=1664403&r2=1664404&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sv/TestSwedishAnalyzer.java (original)
+++ lucene/dev/trunk/lucene/analysis/common/src/test/org/apache/lucene/analysis/sv/TestSwedishAnalyzer.java Thu Mar 5 16:45:02 2015
@@ -27,7 +27,7 @@ public class TestSwedishAnalyzer extends
/** This test fails with NPE when the
* stopwords file is missing in classpath */
public void testResourcesAvailable() {
- new SwedishAnalyzer();
+ new SwedishAnalyzer().close();
}
/** test stopwords and stemming */
@@ -38,6 +38,7 @@ public class TestSwedishAnalyzer extends
checkOneTerm(a, "jaktkarlens", "jaktkarl");
// stopword
assertAnalyzesTo(a, "och", new String[] {});
+ a.close();
}
/** test use of exclusion set */
@@ -47,10 +48,13 @@ public class TestSwedishAnalyzer extends
SwedishAnalyzer.getDefaultStopSet(), exclusionSet);
checkOneTerm(a, "jaktkarlarne", "jaktkarlarne");
checkOneTerm(a, "jaktkarlens", "jaktkarl");
+ a.close();
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
- checkRandomData(random(), new SwedishAnalyzer(), 1000*RANDOM_MULTIPLIER);
+ Analyzer analyzer = new SwedishAnalyzer();
+ checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+ analyzer.close();
}
}