You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by rm...@apache.org on 2011/05/09 17:24:23 UTC
svn commit: r1101062 [3/21] - in /lucene/dev/branches/bulkpostings: ./
dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/
dev-tools/idea/lucene/contrib/ant/ dev-tools/idea/lucene/contrib/db/bdb-je/
dev-tools/idea/lucene/contrib/db/bdb/ dev-tools/idea/...
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java Mon May 9 15:24:04 2011
@@ -90,7 +90,7 @@ public class HighlighterTest extends Bas
Directory ramDir;
public IndexSearcher searcher = null;
int numHighlights = 0;
- final Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+ final Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
TopDocs hits;
String[] texts = {
@@ -101,7 +101,7 @@ public class HighlighterTest extends Bas
"wordx wordy wordz wordx wordy wordx worda wordb wordy wordc", "y z x y z a b", "lets is a the lets is a the lets is a the lets" };
public void testQueryScorerHits() throws Exception {
- Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+ Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
query = qp.parse("\"very long\"");
searcher = new IndexSearcher(ramDir, true);
@@ -133,7 +133,7 @@ public class HighlighterTest extends Bas
String s1 = "I call our world Flatland, not because we call it so,";
- QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
// Verify that a query against the default field results in text being
// highlighted
@@ -165,7 +165,7 @@ public class HighlighterTest extends Bas
*/
private static String highlightField(Query query, String fieldName, String text)
throws IOException, InvalidTokenOffsetsException {
- TokenStream tokenStream = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
+ TokenStream tokenStream = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
// Assuming "<B>", "</B>" used to highlight
SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -210,7 +210,7 @@ public class HighlighterTest extends Bas
String f2c = f2 + ":";
String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2
+ " OR " + f2c + ph2 + ")";
- Analyzer analyzer = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
Query query = qp.parse(q);
@@ -1134,13 +1134,13 @@ public class HighlighterTest extends Bas
sb.append("stoppedtoken");
}
SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
- Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
+ Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
"data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
// new
// QueryTermScorer(query));
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(100);
- match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+ match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
.getMaxDocCharsToAnalyze());
@@ -1151,7 +1151,7 @@ public class HighlighterTest extends Bas
// + whitespace)
sb.append(" ");
sb.append(goodWord);
- match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+ match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
.getMaxDocCharsToAnalyze());
}
@@ -1170,10 +1170,10 @@ public class HighlighterTest extends Bas
String text = "this is a text with searchterm in it";
SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
- Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
+ Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(36);
- String match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
+ String match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
assertTrue(
"Matched text should contain remainder of text after highlighted query ",
match.endsWith("in it"));
@@ -1191,7 +1191,7 @@ public class HighlighterTest extends Bas
// test to show how rewritten query can still be used
if (searcher != null) searcher.close();
searcher = new IndexSearcher(ramDir, true);
- Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+ Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
Query query = parser.parse("JF? or Kenned*");
@@ -1446,64 +1446,64 @@ public class HighlighterTest extends Bas
Highlighter highlighter;
String result;
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 <B>foo</B>", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed<B>10</B> foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("<B>Hi</B>-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-<B>Speed</B>10 foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
// ///////////////// same tests, just put the bigger overlapping token
// first
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 <B>foo</B>", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed<B>10</B> foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("<B>Hi</B>-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-<B>Speed</B>10 foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
- query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
@@ -1514,7 +1514,7 @@ public class HighlighterTest extends Bas
}
private Directory dir;
- private Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ private Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
public void testWeightedTermsWithDeletes() throws IOException, ParseException, InvalidTokenOffsetsException {
makeIndex();
@@ -1529,7 +1529,7 @@ public class HighlighterTest extends Bas
}
private void makeIndex() throws IOException {
- IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1539,7 +1539,7 @@ public class HighlighterTest extends Bas
}
private void deleteDocument() throws IOException {
- IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
writer.deleteDocuments( new Term( "t_text1", "del" ) );
// To see negative idf, keep comment the following line
//writer.optimize();
@@ -1644,7 +1644,7 @@ public class HighlighterTest extends Bas
dir = newDirectory();
ramDir = newDirectory();
IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
for (String text : texts) {
addDoc(writer, text);
}
Copied: lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java (from r1096609, lucene/dev/trunk/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java?p2=lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java&p1=lucene/dev/trunk/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java&r1=1096609&r2=1101062&rev=1101062&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/OffsetLimitTokenFilterTest.java Mon May 9 15:24:04 2011
@@ -1 +1,60 @@
-package org.apache.lucene.search.highlight;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.Reader;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
public class OffsetLimitTokenFilterTest extends BaseTokenStreamTestCase {
public void testFilter() throws Exception {
TokenStream stream = new MockTokenizer(new StringReader(
"short toolong evenmuchlongertext a ab toolong foo"),
MockTokenizer.WHITESPACE, false);
OffsetLimitTokenFilter filter = new OffsetLimitTokenFilter(stream, 10);
assertTokenStreamContents(filter, new String[] {"short", "toolong"});
stream = new MockTokenizer(new StringReader(
"short toolong evenmuchlongertext a ab toolong foo"),
MockTokenizer.WHITESPACE, false);
filter = new OffsetLimitTokenFilter(stream, 12);
assertTokenStreamContents(filter, new String[] {"short", "toolong"});
stream = new MockTokenizer(new StringReader(
"short toolong evenmuchlongertext a ab toolong foo"),
MockTokenizer.WHITESPACE, false);
filter = new OffsetLimitTokenFilter(stream, 30);
assertTokenStreamContents(filter, new String[] {"short", "toolong",
"evenmuchlongertext"});
checkOneTermReuse(new Analyzer() {
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
return new OffsetLimitTokenFilter(new MockTokenizer(reader,
MockTokenizer.WHITESPACE, false), 10);
}
}, "llenges", "llenges");
}
}
\ No newline at end of file
+package org.apache.lucene.search.highlight;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.Reader;
+import java.io.StringReader;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenStream;
+
+public class OffsetLimitTokenFilterTest extends BaseTokenStreamTestCase {
+
+ public void testFilter() throws Exception {
+ TokenStream stream = new MockTokenizer(new StringReader(
+ "short toolong evenmuchlongertext a ab toolong foo"),
+ MockTokenizer.WHITESPACE, false);
+ OffsetLimitTokenFilter filter = new OffsetLimitTokenFilter(stream, 10);
+ assertTokenStreamContents(filter, new String[] {"short", "toolong"});
+
+ stream = new MockTokenizer(new StringReader(
+ "short toolong evenmuchlongertext a ab toolong foo"),
+ MockTokenizer.WHITESPACE, false);
+ filter = new OffsetLimitTokenFilter(stream, 12);
+ assertTokenStreamContents(filter, new String[] {"short", "toolong"});
+
+ stream = new MockTokenizer(new StringReader(
+ "short toolong evenmuchlongertext a ab toolong foo"),
+ MockTokenizer.WHITESPACE, false);
+ filter = new OffsetLimitTokenFilter(stream, 30);
+ assertTokenStreamContents(filter, new String[] {"short", "toolong",
+ "evenmuchlongertext"});
+
+
+ checkOneTermReuse(new Analyzer() {
+
+ @Override
+ public TokenStream tokenStream(String fieldName, Reader reader) {
+ return new OffsetLimitTokenFilter(new MockTokenizer(reader,
+ MockTokenizer.WHITESPACE, false), 10);
+ }
+ }, "llenges", "llenges");
+ }
+}
\ No newline at end of file
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java Mon May 9 15:24:04 2011
@@ -87,9 +87,9 @@ public abstract class AbstractTestCase e
@Override
public void setUp() throws Exception {
super.setUp();
- analyzerW = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ analyzerW = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
analyzerB = new BigramAnalyzer();
- analyzerK = new MockAnalyzer(MockTokenizer.KEYWORD, false);
+ analyzerK = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
paW = new QueryParser(TEST_VERSION_CURRENT, F, analyzerW );
paB = new QueryParser(TEST_VERSION_CURRENT, F, analyzerB );
dir = newDirectory();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java Mon May 9 15:24:04 2011
@@ -59,7 +59,7 @@ public class TestEmptyIndex extends Luce
// make sure a Directory acts the same
Directory d = newDirectory();
- new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+ new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
r = IndexReader.open(d, false);
testNorms(r);
r.close();
@@ -84,7 +84,7 @@ public class TestEmptyIndex extends Luce
// make sure a Directory acts the same
Directory d = newDirectory();
- new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+ new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
r = IndexReader.open(d, false);
termsEnumTest(r);
r.close();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java Mon May 9 15:24:04 2011
@@ -21,6 +21,7 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
+import java.util.Random;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
@@ -65,7 +66,7 @@ public class TestIndicesEquals extends L
// create dir data
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
for (int i = 0; i < 20; i++) {
Document document = new Document();
@@ -88,10 +89,13 @@ public class TestIndicesEquals extends L
Directory dir = newDirectory();
InstantiatedIndex ii = new InstantiatedIndex();
-
+
+ // we need to pass the "same" random to both, so they surely index the same payload data.
+ long seed = random.nextLong();
+
// create dir data
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))).setMergePolicy(newLogMergePolicy()));
indexWriter.setInfoStream(VERBOSE ? System.out : null);
if (VERBOSE) {
System.out.println("TEST: make test index");
@@ -104,7 +108,7 @@ public class TestIndicesEquals extends L
indexWriter.close();
// test ii writer
- InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(), true);
+ InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(new Random(seed)), true);
for (int i = 0; i < 500; i++) {
Document document = new Document();
assembleDocument(document, i);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestRealTime.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestRealTime.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestRealTime.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestRealTime.java Mon May 9 15:24:04 2011
@@ -36,7 +36,7 @@ public class TestRealTime extends Lucene
InstantiatedIndex index = new InstantiatedIndex();
InstantiatedIndexReader reader = new InstantiatedIndexReader(index);
- IndexSearcher searcher = newSearcher(reader);
+ IndexSearcher searcher = newSearcher(reader, false);
InstantiatedIndexWriter writer = new InstantiatedIndexWriter(index);
Document doc;
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java Mon May 9 15:24:04 2011
@@ -34,17 +34,17 @@ public class TestUnoptimizedReaderOnCons
public void test() throws Exception {
Directory dir = newDirectory();
- IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
addDocument(iw, "Hello, world!");
addDocument(iw, "All work and no play makes jack a dull boy");
iw.close();
- iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+ iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
addDocument(iw, "Hello, tellus!");
addDocument(iw, "All work and no play makes danny a dull boy");
iw.close();
- iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+ iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
addDocument(iw, "Hello, earth!");
addDocument(iw, "All work and no play makes wendy a dull girl");
iw.close();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java Mon May 9 15:24:04 2011
@@ -177,9 +177,9 @@ public class MemoryIndexTest extends Bas
*/
private Analyzer randomAnalyzer() {
switch(random.nextInt(3)) {
- case 0: return new MockAnalyzer(MockTokenizer.SIMPLE, true);
- case 1: return new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
- default: return new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ case 0: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
+ case 1: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+ default: return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
}
}
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java Mon May 9 15:24:04 2011
@@ -61,7 +61,7 @@ public class TestFieldNormModifier exten
super.setUp();
store = newDirectory();
IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java Mon May 9 15:24:04 2011
@@ -39,7 +39,7 @@ public class TestIndexSplitter extends L
mergePolicy.setNoCFSRatio(1);
IndexWriter iw = new IndexWriter(
fsDir,
- new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
setOpenMode(OpenMode.CREATE).
setMergePolicy(mergePolicy)
);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java Mon May 9 15:24:04 2011
@@ -32,7 +32,7 @@ public class TestMultiPassIndexSplitter
public void setUp() throws Exception {
super.setUp();
dir = newDirectory();
- IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
Document doc;
for (int i = 0; i < NUM_DOCS; i++) {
doc = new Document();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java Mon May 9 15:24:04 2011
@@ -25,7 +25,7 @@ public class TestTermVectorAccessor exte
public void test() throws Exception {
Directory dir = newDirectory();
- IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+ IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
Document doc;
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java Mon May 9 15:24:04 2011
@@ -30,7 +30,7 @@ import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
@@ -134,10 +134,10 @@ public class TestAppendingCodec extends
public void testCodec() throws Exception {
Directory dir = new AppendingRAMDirectory(random, new RAMDirectory());
- IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer());
+ IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random));
cfg.setCodecProvider(new AppendingCodecProvider());
- ((LogMergePolicy)cfg.getMergePolicy()).setUseCompoundFile(false);
+ ((TieredMergePolicy)cfg.getMergePolicy()).setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(dir, cfg);
Document doc = new Document();
doc.add(newField("f", text, Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java Mon May 9 15:24:04 2011
@@ -40,7 +40,7 @@ public class TestHighFreqTerms extends L
public static void setUpClass() throws Exception {
dir = newDirectory();
writer = new IndexWriter(dir, newIndexWriterConfig(random,
- TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
+ TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2));
writer.setInfoStream(VERBOSE ? System.out : null);
indexDocs(writer);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java Mon May 9 15:24:04 2011
@@ -66,7 +66,7 @@ public class TestLengthNormModifier exte
super.setUp();
store = newDirectory();
IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/java/org/apache/lucene/search/FuzzyLikeThisQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/java/org/apache/lucene/search/FuzzyLikeThisQuery.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/java/org/apache/lucene/search/FuzzyLikeThisQuery.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/java/org/apache/lucene/search/FuzzyLikeThisQuery.java Mon May 9 15:24:04 2011
@@ -213,17 +213,15 @@ public class FuzzyLikeThisQuery extends
BoostAttribute boostAtt =
fe.attributes().addAttribute(BoostAttribute.class);
while ((possibleMatch = fe.next()) != null) {
- if (possibleMatch!=null) {
- numVariants++;
- totalVariantDocFreqs+=fe.docFreq();
- float score=boostAtt.getBoost();
- if (variantsQ.size() < MAX_VARIANTS_PER_TERM || score > minScore){
- ScoreTerm st=new ScoreTerm(new Term(startTerm.field(), new BytesRef(possibleMatch)),score,startTerm);
- variantsQ.insertWithOverflow(st);
- minScore = variantsQ.top().score; // maintain minScore
- }
- maxBoostAtt.setMaxNonCompetitiveBoost(variantsQ.size() >= MAX_VARIANTS_PER_TERM ? minScore : Float.NEGATIVE_INFINITY);
+ numVariants++;
+ totalVariantDocFreqs+=fe.docFreq();
+ float score=boostAtt.getBoost();
+ if (variantsQ.size() < MAX_VARIANTS_PER_TERM || score > minScore){
+ ScoreTerm st=new ScoreTerm(new Term(startTerm.field(), new BytesRef(possibleMatch)),score,startTerm);
+ variantsQ.insertWithOverflow(st);
+ minScore = variantsQ.top().score; // maintain minScore
}
+ maxBoostAtt.setMaxNonCompetitiveBoost(variantsQ.size() >= MAX_VARIANTS_PER_TERM ? minScore : Float.NEGATIVE_INFINITY);
}
if(numVariants>0)
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java Mon May 9 15:24:04 2011
@@ -39,7 +39,7 @@ public class BooleanFilterTest extends L
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
//Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
addDoc(writer, "admin guest", "010", "20040101","Y");
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java Mon May 9 15:24:04 2011
@@ -43,7 +43,7 @@ public class DuplicateFilterTest extends
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
//Add series of docs with filterable fields : url, text and dates flags
addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java Mon May 9 15:24:04 2011
@@ -34,13 +34,13 @@ public class FuzzyLikeThisQueryTest exte
private Directory directory;
private IndexSearcher searcher;
private IndexReader reader;
- private Analyzer analyzer=new MockAnalyzer();
+ private Analyzer analyzer=new MockAnalyzer(random);
@Override
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+ RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
//Add series of docs with misspelt names
addDoc(writer, "jonathon smythe","1");
@@ -121,7 +121,7 @@ public class FuzzyLikeThisQueryTest exte
}
public void testFuzzyLikeThisQueryEquals() {
- Analyzer analyzer = new MockAnalyzer();
+ Analyzer analyzer = new MockAnalyzer(random);
FuzzyLikeThisQuery fltq1 = new FuzzyLikeThisQuery(10, analyzer);
fltq1.addTerms("javi", "subject", 0.5f, 2);
FuzzyLikeThisQuery fltq2 = new FuzzyLikeThisQuery(10, analyzer);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/TestFieldCacheRewriteMethod.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/TestFieldCacheRewriteMethod.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/TestFieldCacheRewriteMethod.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/TestFieldCacheRewriteMethod.java Mon May 9 15:24:04 2011
@@ -36,8 +36,8 @@ public class TestFieldCacheRewriteMethod
RegexpQuery filter = new RegexpQuery(new Term("field", regexp), RegExp.NONE);
filter.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
- TopDocs fieldCacheDocs = searcher.search(fieldCache, 25);
- TopDocs filterDocs = searcher.search(filter, 25);
+ TopDocs fieldCacheDocs = searcher1.search(fieldCache, 25);
+ TopDocs filterDocs = searcher2.search(filter, 25);
CheckHits.checkEqual(fieldCache, fieldCacheDocs.scoreDocs, filterDocs.scoreDocs);
}
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java Mon May 9 15:24:04 2011
@@ -56,7 +56,7 @@ public class TestSpanRegexQuery extends
public void testSpanRegex() throws Exception {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)));
Document doc = new Document();
// doc.add(newField("field", "the quick brown fox jumps over the lazy dog",
// Field.Store.NO, Field.Index.ANALYZED));
@@ -97,14 +97,14 @@ public class TestSpanRegexQuery extends
// creating first index writer
IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writerA.addDocument(lDoc);
writerA.optimize();
writerA.close();
// creating second index writer
IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writerB.addDocument(lDoc2);
writerB.optimize();
writerB.close();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java Mon May 9 15:24:04 2011
@@ -74,7 +74,7 @@ public class TestMoreLikeThis extends Lu
Map<String,Float> originalValues = getOriginalValues();
MoreLikeThis mlt = new MoreLikeThis(reader);
- mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
mlt.setMinDocFreq(1);
mlt.setMinTermFreq(1);
mlt.setMinWordLen(1);
@@ -109,7 +109,7 @@ public class TestMoreLikeThis extends Lu
private Map<String,Float> getOriginalValues() throws IOException {
Map<String,Float> originalValues = new HashMap<String,Float>();
MoreLikeThis mlt = new MoreLikeThis(reader);
- mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
mlt.setMinDocFreq(1);
mlt.setMinTermFreq(1);
mlt.setMinWordLen(1);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/java/org/apache/lucene/queryParser/core/nodes/QueryNodeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/java/org/apache/lucene/queryParser/core/nodes/QueryNodeImpl.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/java/org/apache/lucene/queryParser/core/nodes/QueryNodeImpl.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/java/org/apache/lucene/queryParser/core/nodes/QueryNodeImpl.java Mon May 9 15:24:04 2011
@@ -160,7 +160,7 @@ public abstract class QueryNodeImpl impl
/** verify if a node contains a tag */
public boolean containsTag(String tagName) {
- return this.tags.containsKey(tagName);
+ return this.tags.containsKey(tagName.toLowerCase());
}
public Object getTag(String tagName) {
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java Mon May 9 15:24:04 2011
@@ -34,7 +34,7 @@ import org.apache.lucene.util.LuceneTest
public class TestComplexPhraseQuery extends LuceneTestCase {
Directory rd;
- Analyzer analyzer = new MockAnalyzer();
+ Analyzer analyzer = new MockAnalyzer(random);
DocData docsContent[] = { new DocData("john smith", "1"),
new DocData("johathon smith", "2"),
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/core/nodes/TestQueryNode.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/core/nodes/TestQueryNode.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/core/nodes/TestQueryNode.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/core/nodes/TestQueryNode.java Mon May 9 15:24:04 2011
@@ -32,4 +32,16 @@ public class TestQueryNode extends Lucen
bq.add(Arrays.asList(nodeB));
assertEquals(2, bq.getChildren().size());
}
+
+ /* LUCENE-3045 bug in QueryNodeImpl.containsTag(String key)*/
+ public void testTags() throws Exception {
+ QueryNode node = new FieldQueryNode("foo", "A", 0, 1);
+
+ node.setTag("TaG", new Object());
+ assertTrue(node.getTagMap().size() > 0);
+ assertTrue(node.containsTag("tAg"));
+ assertTrue(node.getTag("tAg") != null);
+
+ }
+
}
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java Mon May 9 15:24:04 2011
@@ -43,7 +43,7 @@ public class TestExtendableQueryParser e
public QueryParser getParser(Analyzer a, Extensions extensions)
throws Exception {
if (a == null)
- a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+ a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
QueryParser qp = extensions == null ? new ExtendableQueryParser(
TEST_VERSION_CURRENT, "field", a) : new ExtendableQueryParser(
TEST_VERSION_CURRENT, "field", a, extensions);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java Mon May 9 15:24:04 2011
@@ -125,7 +125,7 @@ public class TestPrecedenceQueryParser e
public PrecedenceQueryParser getParser(Analyzer a) throws Exception {
if (a == null)
- a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+ a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
PrecedenceQueryParser qp = new PrecedenceQueryParser();
qp.setAnalyzer(a);
qp.setDefaultOperator(Operator.OR);
@@ -171,7 +171,7 @@ public class TestPrecedenceQueryParser e
public Query getQueryDOA(String query, Analyzer a) throws Exception {
if (a == null)
- a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+ a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
PrecedenceQueryParser qp = new PrecedenceQueryParser();
qp.setAnalyzer(a);
qp.setDefaultOperator(Operator.AND);
@@ -232,7 +232,7 @@ public class TestPrecedenceQueryParser e
"+(title:dog title:cat) -author:\"bob dole\"");
PrecedenceQueryParser qp = new PrecedenceQueryParser();
- qp.setAnalyzer(new MockAnalyzer());
+ qp.setAnalyzer(new MockAnalyzer(random));
// make sure OR is the default:
assertEquals(Operator.OR, qp.getDefaultOperator());
qp.setDefaultOperator(Operator.AND);
@@ -246,7 +246,7 @@ public class TestPrecedenceQueryParser e
}
public void testPunct() throws Exception {
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
assertQueryEquals("a&b", a, "a&b");
assertQueryEquals("a&&b", a, "a&&b");
assertQueryEquals(".NET", a, ".NET");
@@ -266,7 +266,7 @@ public class TestPrecedenceQueryParser e
assertQueryEquals("term 1.0 1 2", null, "term");
assertQueryEquals("term term1 term2", null, "term term term");
- Analyzer a = new MockAnalyzer();
+ Analyzer a = new MockAnalyzer(random);
assertQueryEquals("3", a, "3");
assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -405,7 +405,7 @@ public class TestPrecedenceQueryParser e
final String defaultField = "default";
final String monthField = "month";
final String hourField = "hour";
- PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer());
+ PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer(random));
Map<CharSequence, DateTools.Resolution> fieldMap = new HashMap<CharSequence,DateTools.Resolution>();
// set a field specific date resolution
@@ -467,7 +467,7 @@ public class TestPrecedenceQueryParser e
}
public void testEscaped() throws Exception {
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
assertQueryEquals("a\\-b:c", a, "a-b:c");
assertQueryEquals("a\\+b:c", a, "a+b:c");
@@ -533,7 +533,7 @@ public class TestPrecedenceQueryParser e
public void testBoost() throws Exception {
CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
- Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+ Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
PrecedenceQueryParser qp = new PrecedenceQueryParser();
qp.setAnalyzer(oneStopAnalyzer);
@@ -548,7 +548,7 @@ public class TestPrecedenceQueryParser e
q = qp.parse("\"on\"^1.0", "field");
assertNotNull(q);
- q = getParser(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
+ q = getParser(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
"field");
assertNotNull(q);
}
@@ -564,7 +564,7 @@ public class TestPrecedenceQueryParser e
public void testBooleanQuery() throws Exception {
BooleanQuery.setMaxClauseCount(2);
try {
- getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
+ getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
fail("ParseException expected due to too many boolean clauses");
} catch (QueryNodeException expected) {
// too many boolean clauses, so ParseException is expected
@@ -573,7 +573,7 @@ public class TestPrecedenceQueryParser e
// LUCENE-792
public void testNOT() throws Exception {
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
assertQueryEquals("NOT foo AND bar", a, "-foo +bar");
}
@@ -582,7 +582,7 @@ public class TestPrecedenceQueryParser e
* issue has been corrected.
*/
public void testPrecedence() throws Exception {
- PrecedenceQueryParser parser = getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ PrecedenceQueryParser parser = getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
Query query1 = parser.parse("A AND B OR C AND D", "field");
Query query2 = parser.parse("(A AND B) OR (C AND D)", "field");
assertEquals(query1, query2);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java Mon May 9 15:24:04 2011
@@ -80,7 +80,7 @@ public class TestMultiFieldQPHelper exte
String[] fields = { "b", "t" };
StandardQueryParser mfqp = new StandardQueryParser();
mfqp.setMultiFields(fields);
- mfqp.setAnalyzer(new MockAnalyzer());
+ mfqp.setAnalyzer(new MockAnalyzer(random));
Query q = mfqp.parse("one", null);
assertEquals("b:one t:one", q.toString());
@@ -150,7 +150,7 @@ public class TestMultiFieldQPHelper exte
StandardQueryParser mfqp = new StandardQueryParser();
mfqp.setMultiFields(fields);
mfqp.setFieldsBoost(boosts);
- mfqp.setAnalyzer(new MockAnalyzer());
+ mfqp.setAnalyzer(new MockAnalyzer(random));
// Check for simple
Query q = mfqp.parse("one", null);
@@ -178,24 +178,24 @@ public class TestMultiFieldQPHelper exte
public void testStaticMethod1() throws QueryNodeException {
String[] fields = { "b", "t" };
String[] queries = { "one", "two" };
- Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer());
+ Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer(random));
assertEquals("b:one t:two", q.toString());
String[] queries2 = { "+one", "+two" };
- q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer());
+ q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer(random));
assertEquals("(+b:one) (+t:two)", q.toString());
String[] queries3 = { "one", "+two" };
- q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer());
+ q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer(random));
assertEquals("b:one (+t:two)", q.toString());
String[] queries4 = { "one +more", "+two" };
- q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer());
+ q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer(random));
assertEquals("(b:one +b:more) (+t:two)", q.toString());
String[] queries5 = { "blah" };
try {
- q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer());
+ q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer(random));
fail();
} catch (IllegalArgumentException e) {
// expected exception, array length differs
@@ -219,15 +219,15 @@ public class TestMultiFieldQPHelper exte
BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
BooleanClause.Occur.MUST_NOT };
Query q = QueryParserUtil.parse("one", fields, flags,
- new MockAnalyzer());
+ new MockAnalyzer(random));
assertEquals("+b:one -t:one", q.toString());
- q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+ q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
try {
BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
- q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+ q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
fail();
} catch (IllegalArgumentException e) {
// expected exception, array length differs
@@ -240,19 +240,19 @@ public class TestMultiFieldQPHelper exte
BooleanClause.Occur.MUST_NOT };
StandardQueryParser parser = new StandardQueryParser();
parser.setMultiFields(fields);
- parser.setAnalyzer(new MockAnalyzer());
+ parser.setAnalyzer(new MockAnalyzer(random));
Query q = QueryParserUtil.parse("one", fields, flags,
- new MockAnalyzer());// , fields, flags, new
+ new MockAnalyzer(random));// , fields, flags, new
// MockAnalyzer());
assertEquals("+b:one -t:one", q.toString());
- q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+ q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
try {
BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
- q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+ q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
fail();
} catch (IllegalArgumentException e) {
// expected exception, array length differs
@@ -265,13 +265,13 @@ public class TestMultiFieldQPHelper exte
BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD };
Query q = QueryParserUtil.parse(queries, fields, flags,
- new MockAnalyzer());
+ new MockAnalyzer(random));
assertEquals("+f1:one -f2:two f3:three", q.toString());
try {
BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
q = QueryParserUtil
- .parse(queries, fields, flags2, new MockAnalyzer());
+ .parse(queries, fields, flags2, new MockAnalyzer(random));
fail();
} catch (IllegalArgumentException e) {
// expected exception, array length differs
@@ -284,13 +284,13 @@ public class TestMultiFieldQPHelper exte
BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
BooleanClause.Occur.MUST_NOT };
Query q = QueryParserUtil.parse(queries, fields, flags,
- new MockAnalyzer());
+ new MockAnalyzer(random));
assertEquals("+b:one -t:two", q.toString());
try {
BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
q = QueryParserUtil
- .parse(queries, fields, flags2, new MockAnalyzer());
+ .parse(queries, fields, flags2, new MockAnalyzer(random));
fail();
} catch (IllegalArgumentException e) {
// expected exception, array length differs
@@ -316,7 +316,7 @@ public class TestMultiFieldQPHelper exte
}
public void testStopWordSearching() throws Exception {
- Analyzer analyzer = new MockAnalyzer();
+ Analyzer analyzer = new MockAnalyzer(random);
Directory ramDir = newDirectory();
IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
@@ -342,7 +342,7 @@ public class TestMultiFieldQPHelper exte
* Return empty tokens for field "f1".
*/
private static final class AnalyzerReturningNull extends Analyzer {
- MockAnalyzer stdAnalyzer = new MockAnalyzer();
+ MockAnalyzer stdAnalyzer = new MockAnalyzer(random);
public AnalyzerReturningNull() {
}
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java Mon May 9 15:24:04 2011
@@ -191,7 +191,7 @@ public class TestQPHelper extends Lucene
public StandardQueryParser getParser(Analyzer a) throws Exception {
if (a == null)
- a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+ a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
StandardQueryParser qp = new StandardQueryParser();
qp.setAnalyzer(a);
@@ -281,7 +281,7 @@ public class TestQPHelper extends Lucene
public Query getQueryDOA(String query, Analyzer a) throws Exception {
if (a == null)
- a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+ a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
StandardQueryParser qp = new StandardQueryParser();
qp.setAnalyzer(a);
qp.setDefaultOperator(Operator.AND);
@@ -301,7 +301,7 @@ public class TestQPHelper extends Lucene
}
public void testConstantScoreAutoRewrite() throws Exception {
- StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
Query q = qp.parse("foo*bar", "field");
assertTrue(q instanceof WildcardQuery);
assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery) q).getRewriteMethod());
@@ -410,9 +410,9 @@ public class TestQPHelper extends Lucene
public void testSimple() throws Exception {
assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
assertQueryEquals("term term term", null, "term term term");
- assertQueryEquals("türm term term", new MockAnalyzer(MockTokenizer.WHITESPACE, false),
+ assertQueryEquals("türm term term", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false),
"türm term term");
- assertQueryEquals("ümlaut", new MockAnalyzer(MockTokenizer.WHITESPACE, false), "ümlaut");
+ assertQueryEquals("ümlaut", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), "ümlaut");
// FIXME: change MockAnalyzer to not extend CharTokenizer for this test
//assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
@@ -470,7 +470,7 @@ public class TestQPHelper extends Lucene
}
public void testPunct() throws Exception {
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
assertQueryEquals("a&b", a, "a&b");
assertQueryEquals("a&&b", a, "a&&b");
assertQueryEquals(".NET", a, ".NET");
@@ -491,7 +491,7 @@ public class TestQPHelper extends Lucene
assertQueryEquals("term 1.0 1 2", null, "term");
assertQueryEquals("term term1 term2", null, "term term term");
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
assertQueryEquals("3", a, "3");
assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -726,7 +726,7 @@ public class TestQPHelper extends Lucene
}
public void testEscaped() throws Exception {
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
/*
* assertQueryEquals("\\[brackets", a, "\\[brackets");
@@ -825,7 +825,7 @@ public class TestQPHelper extends Lucene
}
public void testQueryStringEscaping() throws Exception {
- Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+ Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
@@ -866,7 +866,7 @@ public class TestQPHelper extends Lucene
@Ignore("contrib queryparser shouldn't escape wildcard terms")
public void testEscapedWildcard() throws Exception {
StandardQueryParser qp = new StandardQueryParser();
- qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
assertEquals(q, qp.parse("foo\\?ba?r", "field"));
@@ -904,7 +904,7 @@ public class TestQPHelper extends Lucene
public void testBoost() throws Exception {
CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
- Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+ Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
StandardQueryParser qp = new StandardQueryParser();
qp.setAnalyzer(oneStopAnalyzer);
@@ -920,7 +920,7 @@ public class TestQPHelper extends Lucene
assertNotNull(q);
StandardQueryParser qp2 = new StandardQueryParser();
- qp2.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+ qp2.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
q = qp2.parse("the^3", "field");
// "the" is a stop word so the result is an empty query:
@@ -950,7 +950,7 @@ public class TestQPHelper extends Lucene
public void testCustomQueryParserWildcard() {
try {
- new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
+ new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
fail("Wildcard queries should not be allowed");
} catch (QueryNodeException expected) {
// expected exception
@@ -959,7 +959,7 @@ public class TestQPHelper extends Lucene
public void testCustomQueryParserFuzzy() throws Exception {
try {
- new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
+ new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
fail("Fuzzy queries should not be allowed");
} catch (QueryNodeException expected) {
// expected exception
@@ -970,7 +970,7 @@ public class TestQPHelper extends Lucene
BooleanQuery.setMaxClauseCount(2);
try {
StandardQueryParser qp = new StandardQueryParser();
- qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
qp.parse("one two three", "field");
fail("ParseException expected due to too many boolean clauses");
@@ -984,7 +984,7 @@ public class TestQPHelper extends Lucene
*/
public void testPrecedence() throws Exception {
StandardQueryParser qp = new StandardQueryParser();
- qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
Query query1 = qp.parse("A AND B OR C AND D", "field");
Query query2 = qp.parse("+A +B +C +D", "field");
@@ -995,7 +995,7 @@ public class TestQPHelper extends Lucene
// Todo: Convert from DateField to DateUtil
// public void testLocalDateFormat() throws IOException, QueryNodeException {
// Directory ramDir = newDirectory();
-// IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+// IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
// addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
// addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
// iw.close();
@@ -1116,7 +1116,7 @@ public class TestQPHelper extends Lucene
public void testStopwords() throws Exception {
StandardQueryParser qp = new StandardQueryParser();
CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
- qp.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true));
+ qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true));
Query result = qp.parse("a:the OR a:foo", "a");
assertNotNull("result is null and it shouldn't be", result);
@@ -1140,7 +1140,7 @@ public class TestQPHelper extends Lucene
public void testPositionIncrement() throws Exception {
StandardQueryParser qp = new StandardQueryParser();
qp.setAnalyzer(
- new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+ new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
qp.setEnablePositionIncrements(true);
@@ -1161,7 +1161,7 @@ public class TestQPHelper extends Lucene
public void testMatchAllDocs() throws Exception {
StandardQueryParser qp = new StandardQueryParser();
- qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
assertEquals(new MatchAllDocsQuery(), qp.parse("*:*", "field"));
assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)", "field"));
@@ -1173,7 +1173,7 @@ public class TestQPHelper extends Lucene
private void assertHits(int expected, String query, IndexSearcher is)
throws IOException, QueryNodeException {
StandardQueryParser qp = new StandardQueryParser();
- qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+ qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
qp.setLocale(Locale.ENGLISH);
Query q = qp.parse(query, "date");
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java Mon May 9 15:24:04 2011
@@ -41,7 +41,7 @@ public class SingleFieldTestDb {
fieldName = fName;
IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
Version.LUCENE_CURRENT,
- new MockAnalyzer()));
+ new MockAnalyzer(random)));
for (int j = 0; j < docs.length; j++) {
Document d = new Document();
d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java Mon May 9 15:24:04 2011
@@ -71,7 +71,7 @@ public class TestCartesian extends Lucen
super.setUp();
directory = newDirectory();
- IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+ IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
setUpPlotter( 2, 15);
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java Mon May 9 15:24:04 2011
@@ -47,7 +47,7 @@ public class TestDistance extends Lucene
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
- writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+ writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
addData(writer);
}
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java Mon May 9 15:24:04 2011
@@ -29,7 +29,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.Terms;
@@ -45,7 +45,6 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ReaderUtil;
import org.apache.lucene.util.Version;
-import org.apache.lucene.util.VirtualMethod;
/**
* <p>
@@ -508,7 +507,7 @@ public class SpellChecker implements jav
ensureOpen();
final Directory dir = this.spellIndex;
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT, new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).setRAMBufferSizeMB(ramMB));
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(mergeFactor);
+ ((TieredMergePolicy) writer.getConfig().getMergePolicy()).setMaxMergeAtOnce(mergeFactor);
IndexSearcher indexSearcher = obtainSearcher();
final List<TermsEnum> termsEnums = new ArrayList<TermsEnum>();
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java Mon May 9 15:24:04 2011
@@ -35,7 +35,7 @@ public class TestDirectSpellChecker exte
spellChecker.setMinQueryLength(0);
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
- new MockAnalyzer(MockTokenizer.SIMPLE, true));
+ new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
for (int i = 0; i < 20; i++) {
Document doc = new Document();
@@ -93,7 +93,7 @@ public class TestDirectSpellChecker exte
public void testOptions() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
- new MockAnalyzer(MockTokenizer.SIMPLE, true));
+ new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
Document doc = new Document();
doc.add(newField("text", "foobar", Field.Store.NO, Field.Index.ANALYZED));
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java Mon May 9 15:24:04 2011
@@ -46,7 +46,7 @@ public class TestLuceneDictionary extend
public void setUp() throws Exception {
super.setUp();
store = newDirectory();
- IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+ IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
Document doc;
Modified: lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java Mon May 9 15:24:04 2011
@@ -54,7 +54,7 @@ public class TestSpellChecker extends Lu
//create a user index
userindex = newDirectory();
IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random)));
for (int i = 0; i < 1000; i++) {
Document doc = new Document();