You are viewing a plain text version of this content. The canonical link for it was lost in this plain-text rendering; see the commits@lucene.apache.org mailing-list archive for the original message.
Posted to commits@lucene.apache.org by mi...@apache.org on 2014/12/07 11:52:05 UTC
svn commit: r1643659 [4/7] - in /lucene/dev/branches/lucene6005/lucene: ./
analysis/common/src/test/org/apache/lucene/analysis/core/
analysis/icu/src/test/org/apache/lucene/collation/
backward-codecs/src/test/org/apache/lucene/index/ benchmark/src/java...
Copied: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument.java (from r1642536, lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument2.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument.java?p2=lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument.java&p1=lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument2.java&r1=1642536&r2=1643659&rev=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument2.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestDocument.java Sun Dec 7 10:52:03 2014
@@ -23,8 +23,10 @@ import java.math.BigInteger;
import java.net.InetAddress;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
+import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
@@ -60,11 +62,11 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;
-import org.junit.Ignore;
-public class TestDocument2 extends LuceneTestCase {
+public class TestDocument extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
@@ -212,54 +214,6 @@ public class TestDocument2 extends Lucen
dir.close();
}
- // nocommit test multi-valued too
- // nocommit does not work ... how to fix? bring getComparator back to life!?
- @Ignore
- public void testBigIntRange() throws Exception {
- Directory dir = newDirectory();
-
- IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
- FieldTypes fieldTypes = w.getFieldTypes();
- //System.out.println("id type: " + fieldTypes.getFieldType("id"));
-
- Document doc = w.newDocument();
- doc.addBigInteger("big", new BigInteger("3000000000000000000"));
- doc.addAtom("id", "one");
- w.addDocument(doc);
-
- doc = w.newDocument();
- doc.addBigInteger("big", new BigInteger("2000000000000000000"));
- doc.addAtom("id", "two");
- w.addDocument(doc);
-
- doc = w.newDocument();
- doc.addBigInteger("big", new BigInteger("7000000000000000000"));
- doc.addAtom("id", "three");
- w.addDocument(doc);
-
- IndexReader r = DirectoryReader.open(w, true);
- IndexSearcher s = newSearcher(r);
-
- System.out.println("FILTER: " + fieldTypes.newRangeFilter("big",
- new BigInteger("0"), true,
- new BigInteger("3000000000000000000"), true));
-
- // Make sure range query hits the right number of hits
- assertEquals(2, search(s, fieldTypes.newRangeFilter("big",
- new BigInteger("0"), true,
- new BigInteger("3000000000000000000"), true), 1).totalHits);
- System.out.println("test query 2");
- assertEquals(3, search(s, fieldTypes.newRangeFilter("big",
- new BigInteger("0"), true,
- new BigInteger("10000000000000000000"), true), 1).totalHits);
- assertEquals(1, search(s, fieldTypes.newRangeFilter("big",
- new BigInteger("10000000000000000000"), true,
- new BigInteger("25000000000000000000"), true), 1).totalHits);
- r.close();
- w.close();
- dir.close();
- }
-
public void testHalfFloatRange() throws Exception {
Directory dir = newDirectory();
@@ -286,9 +240,9 @@ public class TestDocument2 extends Lucen
IndexSearcher s = newSearcher(r);
// Make sure range query hits the right number of hits
- assertEquals(2, search(s, fieldTypes.newRangeFilter("halffloat", 0f, true, 3f, true), 1).totalHits);
- assertEquals(3, search(s, fieldTypes.newRangeFilter("halffloat", 0f, true, 10f, true), 1).totalHits);
- assertEquals(1, search(s, fieldTypes.newRangeFilter("halffloat", 1f, true,2.5f, true), 1).totalHits);
+ assertEquals(2, search(s, fieldTypes.newHalfFloatRangeFilter("halffloat", 0f, true, 3f, true), 1).totalHits);
+ assertEquals(3, search(s, fieldTypes.newHalfFloatRangeFilter("halffloat", 0f, true, 10f, true), 1).totalHits);
+ assertEquals(1, search(s, fieldTypes.newHalfFloatRangeFilter("halffloat", 1f, true,2.5f, true), 1).totalHits);
r.close();
w.close();
dir.close();
@@ -358,9 +312,9 @@ public class TestDocument2 extends Lucen
IndexSearcher s = newSearcher(r);
// Make sure range query hits the right number of hits
- assertEquals(2, search(s, fieldTypes.newRangeFilter("float", 0f, true, 3f, true), 1).totalHits);
- assertEquals(3, search(s, fieldTypes.newRangeFilter("float", 0f, true, 10f, true), 1).totalHits);
- assertEquals(1, search(s, fieldTypes.newRangeFilter("float", 1f, true,2.5f, true), 1).totalHits);
+ assertEquals(2, search(s, fieldTypes.newFloatRangeFilter("float", 0f, true, 3f, true), 1).totalHits);
+ assertEquals(3, search(s, fieldTypes.newFloatRangeFilter("float", 0f, true, 10f, true), 1).totalHits);
+ assertEquals(1, search(s, fieldTypes.newFloatRangeFilter("float", 1f, true,2.5f, true), 1).totalHits);
// Make sure doc values shows the correct float values:
TopDocs hits = s.search(new MatchAllDocsQuery(), 3, fieldTypes.newSort("id"));
@@ -436,8 +390,8 @@ public class TestDocument2 extends Lucen
IndexReader r = DirectoryReader.open(w, true);
IndexSearcher s = newSearcher(r);
- assertEquals(2, search(s, fieldTypes.newRangeFilter("int", 0, true, 3, true), 1).totalHits);
- assertEquals(3, search(s, fieldTypes.newRangeFilter("int", 0, true, 10, true), 1).totalHits);
+ assertEquals(2, search(s, fieldTypes.newIntRangeFilter("int", 0, true, 3, true), 1).totalHits);
+ assertEquals(3, search(s, fieldTypes.newIntRangeFilter("int", 0, true, 10, true), 1).totalHits);
w.close();
r.close();
dir.close();
@@ -447,14 +401,11 @@ public class TestDocument2 extends Lucen
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
FieldTypes fieldTypes = w.getFieldTypes();
- try {
- fieldTypes.setAnalyzer("atom", new MockAnalyzer(random()));
- // nocommit fixme
- // fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"atom\": can only setIndexAnalyzer if the field is indexed", ise.getMessage());
- }
+ Document doc = w.newDocument();
+ doc.addAtom("atom", "foo");
+ // nocommit fixme
+ shouldFail(() -> fieldTypes.setAnalyzer("atom", new MockAnalyzer(random())),
+ "field \"atom\": type ATOM cannot have an indexAnalyzer");
w.close();
dir.close();
}
@@ -466,13 +417,8 @@ public class TestDocument2 extends Lucen
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.setDocValuesType("string", DocValuesType.SORTED);
Document doc = w.newDocument();
- try {
- doc.addInt("string", 17);
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"string\": type INT must use NUMERIC or SORTED_NUMERIC docValuesType (got: SORTED)", ise.getMessage());
- }
+ shouldFail(() -> doc.addInt("string", 17),
+ "field \"string\": type INT must use NUMERIC or SORTED_NUMERIC docValuesType; got: SORTED");
doc.addAtom("string", "a string");
w.addDocument(doc);
w.close();
@@ -486,13 +432,8 @@ public class TestDocument2 extends Lucen
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.setDocValuesType("binary", DocValuesType.BINARY);
Document doc = w.newDocument();
- try {
- doc.addInt("binary", 17);
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"binary\": type INT must use NUMERIC or SORTED_NUMERIC docValuesType (got: BINARY)", ise.getMessage());
- }
+ shouldFail(() -> doc.addInt("binary", 17),
+ "field \"binary\": type INT must use NUMERIC or SORTED_NUMERIC docValuesType; got: BINARY");
doc.addAtom("binary", new BytesRef(new byte[7]));
w.addDocument(doc);
w.close();
@@ -506,13 +447,8 @@ public class TestDocument2 extends Lucen
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.enableStored("body");
Document doc = w.newDocument();
- try {
- doc.addLargeText("body", new StringReader("a small string"));
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"body\": can only store String large text fields", ise.getMessage());
- }
+ shouldFail(() -> doc.addLargeText("body", new StringReader("a small string")),
+ "field \"body\": can only store String large text fields");
doc.addLargeText("body", "a string");
w.addDocument(doc);
w.close();
@@ -526,13 +462,8 @@ public class TestDocument2 extends Lucen
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.enableStored("body");
Document doc = w.newDocument();
- try {
- doc.addLargeText("body", new CannedTokenStream());
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"body\": can only store String large text fields", ise.getMessage());
- }
+ shouldFail(() -> doc.addLargeText("body", new CannedTokenStream()),
+ "field \"body\": can only store String large text fields");
doc.addLargeText("body", "a string");
w.addDocument(doc);
w.close();
@@ -636,13 +567,8 @@ public class TestDocument2 extends Lucen
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.setDocValuesType("binary", DocValuesType.BINARY);
- try {
- fieldTypes.setMultiValued("binary");
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"binary\": DocValuesType=BINARY cannot be multi-valued", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.setMultiValued("binary"),
+ "field \"binary\": DocValuesType=BINARY cannot be multi-valued");
assertFalse(fieldTypes.getMultiValued("binary"));
Document doc = w.newDocument();
doc.addStored("binary", new BytesRef(new byte[7]));
@@ -657,13 +583,8 @@ public class TestDocument2 extends Lucen
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.setDocValuesType("sorted", DocValuesType.SORTED);
- try {
- fieldTypes.setMultiValued("sorted");
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"sorted\": DocValuesType=SORTED cannot be multi-valued", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.setMultiValued("sorted"),
+ "field \"sorted\": DocValuesType=SORTED cannot be multi-valued");
assertFalse(fieldTypes.getMultiValued("sorted"));
Document doc = w.newDocument();
doc.addStored("binary", new BytesRef(new byte[7]));
@@ -678,13 +599,8 @@ public class TestDocument2 extends Lucen
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.setDocValuesType("numeric", DocValuesType.NUMERIC);
- try {
- fieldTypes.setMultiValued("numeric");
- fail("did not hit expected exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"numeric\": DocValuesType=NUMERIC cannot be multi-valued", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.setMultiValued("numeric"),
+ "field \"numeric\": DocValuesType=NUMERIC cannot be multi-valued");
assertFalse(fieldTypes.getMultiValued("numeric"));
Document doc = w.newDocument();
doc.addInt("numeric", 17);
@@ -853,13 +769,8 @@ public class TestDocument2 extends Lucen
IndexWriterConfig iwc = newIndexWriterConfig();
IndexWriter w = new IndexWriter(dir, iwc);
FieldTypes fieldTypes = w.getFieldTypes();
- try {
- fieldTypes.setDocValuesFormat("id", "foobar");
- fail("did not hit exception");
- } catch (IllegalArgumentException iae) {
- // Expected
- assertTrue("wrong exception message: " + iae.getMessage(), iae.getMessage().startsWith("field \"id\": An SPI class of type org.apache.lucene.codecs.DocValuesFormat with name 'foobar' does not exist"));
- }
+ shouldFail(() -> fieldTypes.setDocValuesFormat("id", "foobar"),
+ "field \"id\": An SPI class of type org.apache.lucene.codecs.DocValuesFormat with name 'foobar' does not exist");
fieldTypes.setDocValuesFormat("id", "Memory");
w.close();
dir.close();
@@ -872,13 +783,8 @@ public class TestDocument2 extends Lucen
FieldTypes fieldTypes = w.getFieldTypes();
fieldTypes.setDocValuesType("id", DocValuesType.BINARY);
Document doc = w.newDocument();
- try {
- doc.addInt("id", 17);
- fail("did not hit exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"id\": type INT must use NUMERIC or SORTED_NUMERIC docValuesType (got: BINARY)", ise.getMessage());
- }
+ shouldFail(() -> doc.addInt("id", 17),
+ "field \"id\": type INT must use NUMERIC or SORTED_NUMERIC docValuesType; got: BINARY");
fieldTypes.setPostingsFormat("id", "Memory");
w.close();
dir.close();
@@ -889,13 +795,8 @@ public class TestDocument2 extends Lucen
IndexWriterConfig iwc = newIndexWriterConfig();
IndexWriter w = new IndexWriter(dir, iwc);
FieldTypes fieldTypes = w.getFieldTypes();
- try {
- fieldTypes.setPostingsFormat("id", "foobar");
- fail("did not hit exception");
- } catch (IllegalArgumentException iae) {
- // Expected
- assertTrue("wrong exception message: " + iae.getMessage(), iae.getMessage().startsWith("field \"id\": An SPI class of type org.apache.lucene.codecs.PostingsFormat with name 'foobar' does not exist"));
- }
+ shouldFail(() -> fieldTypes.setPostingsFormat("id", "foobar"),
+ "field \"id\": An SPI class of type org.apache.lucene.codecs.PostingsFormat with name 'foobar' does not exist");
fieldTypes.setPostingsFormat("id", "Memory");
w.close();
dir.close();
@@ -984,16 +885,11 @@ public class TestDocument2 extends Lucen
w.close();
w = new IndexWriter(dir, newIndexWriterConfig());
- doc = w.newDocument();
- try {
- doc.addInt("id", 7);
- fail("did not hit exception");
- } catch (IllegalStateException iae) {
- // Expected
- assertEquals("wrong exception message: " + iae.getMessage(), "field \"id\": cannot change from value type ATOM to INT", iae.getMessage());
- }
- doc.addAtom("id", new BytesRef(new byte[7]));
- w.addDocument(doc);
+ Document doc2 = w.newDocument();
+ shouldFail(() -> doc2.addInt("id", 7),
+ "field \"id\": cannot change from value type ATOM to INT");
+ doc2.addAtom("id", new BytesRef(new byte[7]));
+ w.addDocument(doc2);
w.close();
dir.close();
}
@@ -1010,13 +906,8 @@ public class TestDocument2 extends Lucen
w.addDocument(doc);
IndexReader r = DirectoryReader.open(w, true);
- try {
- fieldTypes.newStringTermQuery("foo", "bar");
- fail("did not hit exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"foo\": cannot create term query: this field was not indexed", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.newStringTermQuery("foo", "bar"),
+ "field \"foo\": cannot create term query: this field was not indexed");
r.close();
w.close();
dir.close();
@@ -1035,13 +926,8 @@ public class TestDocument2 extends Lucen
w.addDocument(doc);
IndexReader r = DirectoryReader.open(w, true);
- try {
- fieldTypes.newSort("foo");
- fail("did not hit exception");
- } catch (IllegalStateException ise) {
- // Expected
- assertEquals("wrong exception message: " + ise.getMessage(), "field \"foo\": this field was not indexed for sorting", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.newSort("foo"),
+ "field \"foo\": this field was not indexed for sorting");
r.close();
w.close();
dir.close();
@@ -1055,12 +941,8 @@ public class TestDocument2 extends Lucen
Document doc = w.newDocument();
doc.addInt("int", 17);
w.addDocument(doc);
- try {
- fieldTypes.newRangeFilter("int", 0, true, 7, true);
- fail("did not hit exception");
- } catch (IllegalStateException ise) {
- assertEquals("field \"int\": this field was not indexed for fast ranges", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.newIntRangeFilter("int", 0, true, 7, true),
+ "field \"int\": cannot create range filter: this field was not indexed for fast ranges");
w.close();
dir.close();
}
@@ -1621,12 +1503,8 @@ public class TestDocument2 extends Lucen
doc.addInt("field", 17);
w.addDocument(doc);
- try {
- fieldTypes.setDocValuesType("field", DocValuesType.NUMERIC);
- fail("did not hit exception");
- } catch (IllegalStateException ise) {
- assertEquals("field \"field\": cannot change docValuesType from NONE to NUMERIC", ise.getMessage());
- }
+ shouldFail(() -> fieldTypes.setDocValuesType("field", DocValuesType.NUMERIC),
+ "field \"field\": cannot change docValuesType from NONE to NUMERIC");
w.close();
dir.close();
}
@@ -1819,17 +1697,14 @@ public class TestDocument2 extends Lucen
fieldTypes.enableStored("field");
Document doc = w.newDocument();
- try {
+ shouldFail(() ->
doc.addLargeText("field", new TokenStream() {
@Override
public boolean incrementToken() {
return false;
}
- });
- fail("did not hit exception");
- } catch (IllegalStateException ise) {
- assertEquals("field \"field\": can only store String large text fields", ise.getMessage());
- }
+ }),
+ "field \"field\": can only store String large text fields");
w.close();
dir.close();
}
@@ -1933,7 +1808,7 @@ public class TestDocument2 extends Lucen
DirectoryReader r = DirectoryReader.open(w, true);
IndexSearcher s = newSearcher(r);
assertEquals(1, s.search(new TermQuery(new Term("double",
- Document.longToBytes(Document.sortableDoubleBits(Double.doubleToLongBits(180.0))))),
+ NumericUtils.doubleToBytes(180.0))),
1).totalHits);
r.close();
w.close();
@@ -1965,6 +1840,153 @@ public class TestDocument2 extends Lucen
w.close();
dir.close();
}
+
+ private static void shouldFail(Runnable x, String message) {
+ try {
+ x.run();
+ fail("did not hit expected exception");
+ } catch (IllegalStateException ise) {
+ assertTrue("wrong message: " + ise.getMessage(), ise.getMessage().startsWith(message));
+ } catch (IllegalArgumentException iae) {
+ assertTrue("wrong message: " + iae.getMessage(), iae.getMessage().startsWith(message));
+ }
+ }
+
+ public void testStoredAfterLargeText() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+
+ Document doc = w.newDocument();
+ doc.addLargeText("field", "ABC");
+ shouldFail(() -> doc.addStored("field", "foo"),
+ "field \"field\": this field is already indexed with indexOptions=DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS");
+ doc.addAtom("collated", "ABC");
+ w.close();
+ dir.close();
+ }
+
+ public void testStoredAfterInt() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+
+ Document doc = w.newDocument();
+ doc.addInt("field", 17);
+ shouldFail(() -> doc.addStoredInt("field", 18),
+ "field \"field\": cannot addStored: field is already indexed with indexOptions=DOCS");
+ doc.addAtom("collated", "ABC");
+ w.close();
+ dir.close();
+ }
+
+ public void testStoredAfterLong() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+
+ Document doc = w.newDocument();
+ doc.addLong("field", 17L);
+ shouldFail(() -> doc.addStoredLong("field", 18L),
+ "field \"field\": cannot addStored: field is already indexed with indexOptions=DOCS");
+ doc.addAtom("collated", "ABC");
+ w.close();
+ dir.close();
+ }
+
+ public void testStoredAfterFloat() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+
+ Document doc = w.newDocument();
+ doc.addFloat("field", 17F);
+ shouldFail(() -> doc.addStoredFloat("field", 18F),
+ "field \"field\": cannot addStored: field is already indexed with indexOptions=DOCS");
+ doc.addAtom("collated", "ABC");
+ w.close();
+ dir.close();
+ }
+
+ public void testStoredAfterDouble() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+
+ Document doc = w.newDocument();
+ doc.addDouble("field", 17D);
+ shouldFail(() -> doc.addStoredDouble("field", 18D),
+ "field \"field\": cannot addStored: field is already indexed with indexOptions=DOCS");
+ doc.addAtom("collated", "ABC");
+ w.close();
+ dir.close();
+ }
+
+ public void testSortKey() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+
+ Document doc = w.newDocument();
+ doc.addAtom("sev", "cosmetic");
+ w.addDocument(doc);
+
+ doc = w.newDocument();
+ doc.addAtom("sev", "major");
+ w.addDocument(doc);
+
+ doc = w.newDocument();
+ doc.addAtom("sev", "critical");
+ w.addDocument(doc);
+
+ doc = w.newDocument();
+ doc.addAtom("sev", "minor");
+ w.addDocument(doc);
+
+ // missing
+ doc = w.newDocument();
+ w.addDocument(doc);
+
+ DirectoryReader r = DirectoryReader.open(w, true);
+ FieldTypes fieldTypes = r.getFieldTypes();
+
+ IndexSearcher s = newSearcher(r);
+ TopDocs hits = s.search(new MatchAllDocsQuery(), 5, fieldTypes.newSort("sev"));
+ assertEquals(5, hits.totalHits);
+ assertEquals("cosmetic", s.doc(hits.scoreDocs[0].doc).getString("sev"));
+ assertEquals("critical", s.doc(hits.scoreDocs[1].doc).getString("sev"));
+ assertEquals("major", s.doc(hits.scoreDocs[2].doc).getString("sev"));
+ assertEquals("minor", s.doc(hits.scoreDocs[3].doc).getString("sev"));
+ assertNull(s.doc(hits.scoreDocs[4].doc).getInt("sev"));
+
+ final Map<BytesRef,Integer> sortMap = new HashMap<>();
+ sortMap.put(new BytesRef("critical"), 0);
+ sortMap.put(new BytesRef("major"), 1);
+ sortMap.put(new BytesRef("minor"), 2);
+ sortMap.put(new BytesRef("cosmetic"), 3);
+ fieldTypes.setSortKey("sev", v -> sortMap.get(v));
+
+ hits = s.search(new MatchAllDocsQuery(), 5, fieldTypes.newSort("sev"));
+ assertEquals(5, hits.totalHits);
+ assertEquals("critical", s.doc(hits.scoreDocs[0].doc).getString("sev"));
+ assertEquals("major", s.doc(hits.scoreDocs[1].doc).getString("sev"));
+ assertEquals("minor", s.doc(hits.scoreDocs[2].doc).getString("sev"));
+ assertEquals("cosmetic", s.doc(hits.scoreDocs[3].doc).getString("sev"));
+ assertNull(s.doc(hits.scoreDocs[4].doc).getInt("sev"));
+
+ r.close();
+ w.close();
+ dir.close();
+ }
+
+ public void testExcMixedBinaryStringAtom() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = newIndexWriter(dir);
+ FieldTypes fieldTypes = w.getFieldTypes();
+ Document doc = w.newDocument();
+ doc.addAtom("field", "bar");
+
+ Document doc2 = w.newDocument();
+ // nocommit why no failure?
+ //shouldFail(() -> doc2.addAtom("field", new BytesRef("bar")),
+ //"foo");
+ w.close();
+ dir.close();
+ }
// nocommit test per-field analyzers
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java Sun Dec 7 10:52:03 2014
@@ -807,4 +807,80 @@ public class TestAbuseSchema extends Luc
dir.close();
}
+ // LUCENE-1008
+ public void testNoTermVectorAfterTermVector() throws IOException {
+ Directory dir = newDirectory();
+ Analyzer a = new MockAnalyzer(random());
+ IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
+
+ List<LowSchemaField> document = new ArrayList<>();
+ LowSchemaField field = new LowSchemaField(a, "tvtest", "a b c", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+ field.enableTermVectors(true, true, true);
+ document.add(field);
+ iw.addDocument(document);
+
+ document = new ArrayList<>();
+ field = new LowSchemaField(a, "tvtest", "x y z", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+ field.enableTermVectors(true, true, true);
+ document.add(field);
+ iw.addDocument(document);
+
+ // Make first segment
+ iw.commit();
+
+ document = new ArrayList<>();
+ field = new LowSchemaField(a, "tvtest", "a b c", IndexOptions.NONE, false);
+ document.add(field);
+ iw.addDocument(document);
+ // Make 2nd segment
+ iw.commit();
+
+ iw.forceMerge(1);
+ iw.close();
+ dir.close();
+ }
+
+ /**
+ * Test adding two fields with the same name, one indexed
+ * the other stored only. The omitNorms and omitTermFreqAndPositions setting
+ * of the stored field should not affect the indexed one (LUCENE-1590)
+ */
+ public void testLUCENE_1590() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
+ Analyzer a = new MockAnalyzer(random());
+
+ List<LowSchemaField> doc = new ArrayList<>();
+ LowSchemaField field = new LowSchemaField(a, "f1", "v1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+ field.disableNorms();
+ doc.add(field);
+
+ field = new LowSchemaField(a, "f1", "v2", IndexOptions.NONE, false);
+ doc.add(field);
+
+ // f2 has no TF
+ field = new LowSchemaField(a, "f2", "v1", IndexOptions.DOCS, true);
+ doc.add(field);
+
+ field = new LowSchemaField(a, "f2", "v2", IndexOptions.NONE, false);
+ doc.add(field);
+
+ writer.addDocument(doc);
+ writer.forceMerge(1); // be sure to have a single segment
+ writer.close();
+
+ TestUtil.checkIndex(dir);
+
+ SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(dir));
+ FieldInfos fi = reader.getFieldInfos();
+ // f1
+ assertFalse("f1 should have no norms", fi.fieldInfo("f1").hasNorms());
+ assertEquals("omitTermFreqAndPositions field bit should not be set for f1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions());
+ // f2
+ assertTrue("f2 should have norms", fi.fieldInfo("f2").hasNorms());
+ assertEquals("omitTermFreqAndPositions field bit should be set for f2", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
+ reader.close();
+ dir.close();
+ }
+
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java Sun Dec 7 10:52:03 2014
@@ -612,10 +612,10 @@ public class TestAddIndexes extends Luce
}
doc = writer.newDocument();
- doc.addStored("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
- doc.addStored("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
- doc.addStored("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
- doc.addStored("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
+ doc.addLargeText("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
+ doc.addLargeText("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
+ doc.addLargeText("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
+ doc.addLargeText("content", "aaa bbb ccc ddd eee fff ggg hhh iii");
for(int i=0;i<10;i++) {
writer.addDocument(doc);
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java Sun Dec 7 10:52:03 2014
@@ -1155,7 +1155,7 @@ public class TestDemoParallelLeafReader
checkAllNumberDVs(r);
IndexSearcher s = newSearcher(r);
testNumericDVSort(s);
- testNumericRangeQuery(s);
+ testRanges(s);
} finally {
reindexer.mgr.release(r);
}
@@ -1177,7 +1177,7 @@ public class TestDemoParallelLeafReader
checkAllNumberDVs(r);
IndexSearcher s = newSearcher(r);
testNumericDVSort(s);
- testNumericRangeQuery(s);
+ testRanges(s);
} finally {
reindexer.mgr.release(r);
}
@@ -1196,7 +1196,7 @@ public class TestDemoParallelLeafReader
checkAllNumberDVs(r);
IndexSearcher s = newSearcher(r);
testNumericDVSort(s);
- testNumericRangeQuery(s);
+ testRanges(s);
} finally {
reindexer.mgr.release(r);
}
@@ -1248,7 +1248,7 @@ public class TestDemoParallelLeafReader
checkAllNumberDVs(r);
IndexSearcher s = newSearcher(r);
testNumericDVSort(s);
- testNumericRangeQuery(s);
+ testRanges(s);
} finally {
reindexer.mgr.release(r);
}
@@ -1327,7 +1327,7 @@ public class TestDemoParallelLeafReader
}
}
- private static void testNumericRangeQuery(IndexSearcher s) throws IOException {
+ private static void testRanges(IndexSearcher s) throws IOException {
NumericDocValues numbers = MultiDocValues.getNumericValues(s.getIndexReader(), "number");
FieldTypes fieldTypes = s.getFieldTypes();
for(int i=0;i<100;i++) {
@@ -1340,7 +1340,7 @@ public class TestDemoParallelLeafReader
max = x;
}
- TopDocs hits = s.search(new ConstantScoreQuery(fieldTypes.newRangeFilter("number", min, true, max, true)), 100);
+ TopDocs hits = s.search(new ConstantScoreQuery(fieldTypes.newLongRangeFilter("number", min, true, max, true)), 100);
for(ScoreDoc scoreDoc : hits.scoreDocs) {
long value = Long.parseLong(s.doc(scoreDoc.doc).getString("text").split(" ")[1]);
assertTrue(value >= min);
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java Sun Dec 7 10:52:03 2014
@@ -247,46 +247,4 @@ public class TestDocumentWriter extends
assertEquals(2, termPositions.nextPosition());
reader.close();
}
-
- /**
- * Test adding two fields with the same name, one indexed
- * the other stored only. The omitNorms and omitTermFreqAndPositions setting
- * of the stored field should not affect the indexed one (LUCENE-1590)
- */
- public void testLUCENE_1590() throws Exception {
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
- FieldTypes fieldTypes = writer.getFieldTypes();
- // f1 has no norms
- fieldTypes.disableNorms("f1");
- fieldTypes.disableHighlighting("f1");
- fieldTypes.setIndexOptions("f1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
- fieldTypes.setMultiValued("f1");
- fieldTypes.setMultiValued("f2");
-
- Document doc = writer.newDocument();
- doc.addLargeText("f1", "v1");
- doc.addStored("f1", "v2");
-
- // f2 has no TF
- fieldTypes.disableHighlighting("f2");
- fieldTypes.setIndexOptions("f2", IndexOptions.DOCS);
- doc.addLargeText("f2", "v1");
- doc.addStored("f2", "v2");
-
- writer.addDocument(doc);
- writer.forceMerge(1); // be sure to have a single segment
- writer.close();
-
- TestUtil.checkIndex(dir);
-
- SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(dir));
- FieldInfos fi = reader.getFieldInfos();
- // f1
- assertFalse("f1 should have no norms", fi.fieldInfo("f1").hasNorms());
- assertEquals("omitTermFreqAndPositions field bit should not be set for f1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions());
- // f2
- assertTrue("f2 should have norms", fi.fieldInfo("f2").hasNorms());
- assertEquals("omitTermFreqAndPositions field bit should be set for f2", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
- reader.close();
- }
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java Sun Dec 7 10:52:03 2014
@@ -24,7 +24,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Document2StoredFieldVisitor;
+import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.FieldTypes;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
@@ -78,7 +78,7 @@ public class TestFieldsReader extends Lu
assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS);
FieldTypes fieldTypes = FieldTypes.getFieldTypes(dir, null);
- Document2StoredFieldVisitor visitor = new Document2StoredFieldVisitor(fieldTypes, DocHelper.TEXT_FIELD_3_KEY);
+ DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(fieldTypes, DocHelper.TEXT_FIELD_3_KEY);
reader.document(0, visitor);
final List<IndexableField> fields = visitor.getDocument().getFields();
assertEquals(1, fields.size());
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java Sun Dec 7 10:52:03 2014
@@ -70,7 +70,7 @@ public class TestForTooMuchCloning exten
assertTrue(hits.totalHits > 0);
final int queryCloneCount = dir.getInputCloneCount() - cloneCount;
//System.out.println("query clone count=" + queryCloneCount);
- assertTrue("too many calls to IndexInput.clone during TermRangeQuery: " + queryCloneCount, queryCloneCount < 50);
+ assertTrue("too many calls to IndexInput.clone during TermRangeQuery: " + queryCloneCount, queryCloneCount < 60);
r.close();
dir.close();
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Sun Dec 7 10:52:03 2014
@@ -468,6 +468,8 @@ public class TestIndexFileDeleter extend
doc.addLargeText("field", "some text");
w.addDocument(doc);
}
+ } catch (AlreadyClosedException ace) {
+ // ok
} catch (IOException ioe) {
if (ioe.getMessage().contains("background merge hit exception")) {
Throwable cause = ioe.getCause();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java Sun Dec 7 10:52:03 2014
@@ -75,11 +75,10 @@ public class TestIndexWriterWithThreads
final long stopTime = System.currentTimeMillis() + 200;
do {
- Document doc = writer.newDocument();
- doc.addLargeText("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj");
- doc.addInt("dv", 5);
try {
- // nocommit wtf? id was never indexed in the doc?
+ Document doc = writer.newDocument();
+ doc.addLargeText("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj");
+ doc.addInt("dv", 5);
writer.updateDocument(new Term("id", ""+(idUpto++)), doc);
addCount++;
} catch (IOException ioe) {
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java Sun Dec 7 10:52:03 2014
@@ -100,7 +100,7 @@ public class TestManyFields extends Luce
String longTerm = b.toString();
Document doc = writer.newDocument();
- doc.addStored("field", longTerm);
+ doc.addLargeText("field", longTerm);
writer.addDocument(doc);
}
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java Sun Dec 7 10:52:03 2014
@@ -366,6 +366,9 @@ public class TestPayloads extends Lucene
}
}
+ /**
+ * This Analyzer uses a MockTokenizer and PayloadFilter.
+ */
private static class PayloadAnalyzer extends Analyzer {
Map<String,PayloadData> fieldToData = new HashMap<>();
@@ -396,7 +399,6 @@ public class TestPayloads extends Lucene
* This Filter adds payloads to the tokens.
*/
private static class PayloadFilter extends TokenFilter {
- private int startOffset;
PayloadAttribute payloadAtt;
CharTermAttribute termAttribute;
private Map<String,PayloadData> fieldToData;
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java Sun Dec 7 10:52:03 2014
@@ -492,35 +492,6 @@ public class TestTermVectorsWriter exten
dir.close();
}
- // LUCENE-1008
- public void testNoTermVectorAfterTermVector() throws IOException {
- Directory dir = newDirectory();
- IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
- FieldTypes fieldTypes = iw.getFieldTypes();
- fieldTypes.enableTermVectors("tvtest");
- fieldTypes.enableTermVectorOffsets("tvtest");
- fieldTypes.enableTermVectorPositions("tvtest");
- fieldTypes.setMultiValued("tvtest");
-
- Document document = iw.newDocument();
- document.addLargeText("tvtest", "a b c");
- iw.addDocument(document);
- document = iw.newDocument();
- document.addLargeText("tvtest", "x y z");
- iw.addDocument(document);
- // Make first segment
- iw.commit();
-
- document.addStored("tvtest", "a b c");
- iw.addDocument(document);
- // Make 2nd segment
- iw.commit();
-
- iw.forceMerge(1);
- iw.close();
- dir.close();
- }
-
// LUCENE-5611: don't abort segment when term vector settings are wrong
public void testNoAbortOnBadTVSettings() throws Exception {
Directory dir = newDirectory();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTerms.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTerms.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestTerms.java Sun Dec 7 10:52:03 2014
@@ -24,6 +24,7 @@ import org.apache.lucene.document.Docume
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.TestUtil;
public class TestTerms extends LuceneTestCase {
@@ -100,8 +101,8 @@ public class TestTerms extends LuceneTes
IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
- assertEquals(minValue, Document.bytesToInt(terms.getMin()));
- assertEquals(maxValue, Document.bytesToInt(terms.getMax()));
+ assertEquals(minValue, NumericUtils.bytesToInt(terms.getMin()));
+ assertEquals(maxValue, NumericUtils.bytesToInt(terms.getMax()));
r.close();
w.close();
@@ -126,8 +127,8 @@ public class TestTerms extends LuceneTes
IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
- assertEquals(minValue, Document.bytesToLong(terms.getMin()));
- assertEquals(maxValue, Document.bytesToLong(terms.getMax()));
+ assertEquals(minValue, NumericUtils.bytesToLong(terms.getMin()));
+ assertEquals(maxValue, NumericUtils.bytesToLong(terms.getMax()));
r.close();
w.close();
@@ -151,8 +152,8 @@ public class TestTerms extends LuceneTes
IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
- assertEquals(minValue, Document.bytesToFloat(terms.getMin()), 0.0f);
- assertEquals(maxValue, Document.bytesToFloat(terms.getMax()), 0.0f);
+ assertEquals(minValue, NumericUtils.bytesToFloat(terms.getMin()), 0.0f);
+ assertEquals(maxValue, NumericUtils.bytesToFloat(terms.getMax()), 0.0f);
r.close();
w.close();
@@ -178,8 +179,8 @@ public class TestTerms extends LuceneTes
Terms terms = MultiFields.getTerms(r, "field");
- assertEquals(minValue, Document.bytesToDouble(terms.getMin()), 0.0);
- assertEquals(maxValue, Document.bytesToDouble(terms.getMax()), 0.0);
+ assertEquals(minValue, NumericUtils.bytesToDouble(terms.getMin()), 0.0);
+ assertEquals(maxValue, NumericUtils.bytesToDouble(terms.getMax()), 0.0);
r.close();
w.close();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java Sun Dec 7 10:52:03 2014
@@ -146,7 +146,7 @@ public class BaseTestRangeFilter extends
} else if (r == index.minR) {
minCount++;
}
- doc.addShortText("rand", pad(r));
+ doc.addAtom("rand", pad(r));
doc.addShortText("body", "body");
writer.addDocument(doc);
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java Sun Dec 7 10:52:03 2014
@@ -249,17 +249,20 @@ public class TestCachingWrapperFilter ex
public void testIsCacheAble() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
- writer.addDocument(writer.newDocument());
+ Document doc = writer.newDocument();
+ doc.addInt("test", 17);
+ writer.addDocument(doc);
writer.close();
IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
+ FieldTypes fieldTypes = reader.getFieldTypes();
// not cacheable:
assertDocIdSetCacheable(reader, new QueryWrapperFilter(new TermQuery(new Term("test","value"))), false);
// returns default empty docidset, always cacheable:
- assertDocIdSetCacheable(reader, NumericRangeFilter.newIntRange("test", Integer.valueOf(10000), Integer.valueOf(-10000), true, true), true);
+ assertDocIdSetCacheable(reader, fieldTypes.newIntRangeFilter("test", Integer.valueOf(10000), true, Integer.valueOf(-10000), true), true);
// is cacheable:
- assertDocIdSetCacheable(reader, DocValuesRangeFilter.newIntRange("test", Integer.valueOf(10), Integer.valueOf(20), true, true), false);
+ assertDocIdSetCacheable(reader, fieldTypes.newDocValuesRangeFilter("test", Integer.valueOf(10), true, Integer.valueOf(20), true), false);
// a fixedbitset filter is always cacheable
assertDocIdSetCacheable(reader, new Filter() {
@Override
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java Sun Dec 7 10:52:03 2014
@@ -58,69 +58,70 @@ public class TestFieldCacheRangeFilter e
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
+ FieldTypes fieldTypes = search.getFieldTypes();
// test id, bounded on both ends
- result = search.search(q, DocValuesRangeFilter.newStringRange("id",minIP,maxIP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,T,maxIP,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,maxIP,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,T,maxIP,F), numDocs).scoreDocs;
assertEquals("all but last", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,maxIP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,F,maxIP,T), numDocs).scoreDocs;
assertEquals("all but first", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,maxIP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,F,maxIP,F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs-2, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",medIP,maxIP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",medIP,T,maxIP,T), numDocs).scoreDocs;
assertEquals("med and up", 1+ maxId-medId, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,medIP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,T,medIP,T), numDocs).scoreDocs;
assertEquals("up to med", 1+ medId-minId, result.length);
// unbounded id
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",null,null,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",(String)null,T,null,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,null,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,T,null,F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",null,maxIP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",null,F,maxIP,T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,null,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,F,null,F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",null,maxIP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",null,F,maxIP,F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",medIP,maxIP,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",medIP,T,maxIP,F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId-medId, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,medIP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,F,medIP,T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId-minId, result.length);
// very small sets
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,minIP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,F,minIP,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",medIP,medIP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",medIP,F,medIP,F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",maxIP,maxIP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",maxIP,F,maxIP,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",minIP,minIP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",minIP,T,minIP,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",null,minIP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",null,F,minIP,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",maxIP,maxIP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",maxIP,T,maxIP,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",maxIP,null,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",maxIP,T,null,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("id",medIP,medIP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("id",medIP,T,medIP,T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
}
@@ -142,47 +143,48 @@ public class TestFieldCacheRangeFilter e
// test extremes, bounded on both ends
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,maxRP,T,T), numDocs).scoreDocs;
+ FieldTypes fieldTypes = search.getFieldTypes();
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,T,maxRP,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,maxRP,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,T,maxRP,F), numDocs).scoreDocs;
assertEquals("all but biggest", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,maxRP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,F,maxRP,T), numDocs).scoreDocs;
assertEquals("all but smallest", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,maxRP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,F,maxRP,F), numDocs).scoreDocs;
assertEquals("all but extremes", numDocs-2, result.length);
// unbounded
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,null,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,T,null,F), numDocs).scoreDocs;
assertEquals("smallest and up", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",null,maxRP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",null,F,maxRP,T), numDocs).scoreDocs;
assertEquals("biggest and down", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,null,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,F,null,F), numDocs).scoreDocs;
assertEquals("not smallest, but up", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",null,maxRP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",null,F,maxRP,F), numDocs).scoreDocs;
assertEquals("not biggest, but down", numDocs-1, result.length);
// very small sets
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,minRP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,F,minRP,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",maxRP,maxRP,F,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",maxRP,F,maxRP,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",minRP,minRP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",minRP,T,minRP,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",null,minRP,F,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",null,F,minRP,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",maxRP,maxRP,T,T), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",maxRP,T,maxRP,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newStringRange("rand",maxRP,null,T,F), numDocs).scoreDocs;
+ result = search.search(q, fieldTypes.newDocValuesRangeFilter("rand",maxRP,T,null,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
}
@@ -205,75 +207,76 @@ public class TestFieldCacheRangeFilter e
// test id, bounded on both ends
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,maxIdO,T,T), numDocs).scoreDocs;
+ FieldTypes fieldTypes = search.getFieldTypes();
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,T,maxIdO,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,maxIdO,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,T,maxIdO,F), numDocs).scoreDocs;
assertEquals("all but last", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,maxIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,F,maxIdO,T), numDocs).scoreDocs;
assertEquals("all but first", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,maxIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,F,maxIdO,F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs-2, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",medIdO,maxIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",medIdO,T,maxIdO,T), numDocs).scoreDocs;
assertEquals("med and up", 1+ maxId-medId, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,medIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,T,medIdO,T), numDocs).scoreDocs;
assertEquals("up to med", 1+ medId-minId, result.length);
// unbounded id
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",null,null,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",(Integer) null,T,null,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,null,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,T,null,F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",null,maxIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",null,F,maxIdO,T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,F,null,F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",null,maxIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",null,F,maxIdO,F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",medIdO,maxIdO,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",medIdO,T,maxIdO,F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId-medId, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,medIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,F,medIdO,T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId-minId, result.length);
// very small sets
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,minIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,F,minIdO,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",medIdO,medIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",medIdO,F,medIdO,F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",maxIdO,F,maxIdO,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",minIdO,minIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",minIdO,T,minIdO,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",null,minIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",null,F,minIdO,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",maxIdO,T,maxIdO,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",maxIdO,null,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",maxIdO,T,null,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",medIdO,medIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",medIdO,T,medIdO,T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
// special cases
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",Integer.valueOf(Integer.MAX_VALUE),null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",Integer.valueOf(Integer.MAX_VALUE),F,null,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",null,Integer.valueOf(Integer.MIN_VALUE),F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",null,F,Integer.valueOf(Integer.MIN_VALUE),F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",maxIdO,minIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",maxIdO,T,minIdO,T), numDocs).scoreDocs;
assertEquals("inverse range", 0, result.length);
}
@@ -296,75 +299,76 @@ public class TestFieldCacheRangeFilter e
// test id, bounded on both ends
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,maxIdO,T,T), numDocs).scoreDocs;
+ FieldTypes fieldTypes = search.getFieldTypes();
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,T,maxIdO,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,maxIdO,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,T,maxIdO,F), numDocs).scoreDocs;
assertEquals("all but last", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,maxIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,F,maxIdO,T), numDocs).scoreDocs;
assertEquals("all but first", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,maxIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,F,maxIdO,F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs-2, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",medIdO,maxIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",medIdO,T,maxIdO,T), numDocs).scoreDocs;
assertEquals("med and up", 1+ maxId-medId, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,medIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,T,medIdO,T), numDocs).scoreDocs;
assertEquals("up to med", 1+ medId-minId, result.length);
// unbounded id
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",null,null,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",(Long) null,T,null,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,null,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,T,null,F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",null,maxIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",null,F,maxIdO,T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,F,null,F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",null,maxIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",null,F,maxIdO,F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs-1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",medIdO,maxIdO,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",medIdO,T,maxIdO,F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId-medId, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,medIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,F,medIdO,T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId-minId, result.length);
// very small sets
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,minIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,F,minIdO,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",medIdO,medIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",medIdO,F,medIdO,F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",maxIdO,F,maxIdO,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",minIdO,minIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",minIdO,T,minIdO,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",null,minIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",null,F,minIdO,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",maxIdO,T,maxIdO,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",maxIdO,null,T,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",maxIdO,T,null,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",medIdO,medIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",medIdO,T,medIdO,T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
// special cases
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",Long.valueOf(Long.MAX_VALUE),null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",Long.valueOf(Long.MAX_VALUE),F,null,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",null,Long.valueOf(Long.MIN_VALUE),F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",null,F,Long.valueOf(Long.MIN_VALUE),F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newLongRange("id_long",maxIdO,minIdO,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_long",maxIdO,T,minIdO,T), numDocs).scoreDocs;
assertEquals("inverse range", 0, result.length);
}
@@ -383,19 +387,20 @@ public class TestFieldCacheRangeFilter e
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
- result = search.search(q,DocValuesRangeFilter.newFloatRange("id_float",minIdO,medIdO,T,T), numDocs).scoreDocs;
+ FieldTypes fieldTypes = search.getFieldTypes();
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_float",minIdO,T,medIdO,T), numDocs).scoreDocs;
assertEquals("find all", numDocs/2, result.length);
int count = 0;
- result = search.search(q,DocValuesRangeFilter.newFloatRange("id_float",null,medIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_float",null,F,medIdO,T), numDocs).scoreDocs;
count += result.length;
- result = search.search(q,DocValuesRangeFilter.newFloatRange("id_float",medIdO,null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_float",medIdO,F,null,F), numDocs).scoreDocs;
count += result.length;
assertEquals("sum of two concenatted ranges", numDocs, count);
- result = search.search(q,DocValuesRangeFilter.newFloatRange("id_float",null,null,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_float",(Float) null,T,null,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newFloatRange("id_float",Float.valueOf(Float.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_float",Float.valueOf(Float.POSITIVE_INFINITY),F,null,F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newFloatRange("id_float",null,Float.valueOf(Float.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_float",null,F,Float.valueOf(Float.NEGATIVE_INFINITY),F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
}
@@ -412,19 +417,20 @@ public class TestFieldCacheRangeFilter e
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
- result = search.search(q,DocValuesRangeFilter.newDoubleRange("id_double",minIdO,medIdO,T,T), numDocs).scoreDocs;
+ FieldTypes fieldTypes = search.getFieldTypes();
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_double",minIdO,T,medIdO,T), numDocs).scoreDocs;
assertEquals("find all", numDocs/2, result.length);
int count = 0;
- result = search.search(q,DocValuesRangeFilter.newDoubleRange("id_double",null,medIdO,F,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_double",null,F,medIdO,T), numDocs).scoreDocs;
count += result.length;
- result = search.search(q,DocValuesRangeFilter.newDoubleRange("id_double",medIdO,null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_double",medIdO,F,null,F), numDocs).scoreDocs;
count += result.length;
assertEquals("sum of two concenatted ranges", numDocs, count);
- result = search.search(q,DocValuesRangeFilter.newDoubleRange("id_double",null,null,T,T), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_double",(Double) null,T,null,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
- result = search.search(q,DocValuesRangeFilter.newDoubleRange("id_double",Double.valueOf(Double.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_double",Double.valueOf(Double.POSITIVE_INFINITY),F,null,F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
- result = search.search(q,DocValuesRangeFilter.newDoubleRange("id_double",null, Double.valueOf(Double.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_double",null,F,Double.valueOf(Double.NEGATIVE_INFINITY),F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
}
@@ -453,19 +459,19 @@ public class TestFieldCacheRangeFilter e
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",-20,20,T,T), 100).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",-20,T,20,T), 100).scoreDocs;
assertEquals("find all", 40, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",0,20,T,T), 100).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",0,T,20,T), 100).scoreDocs;
assertEquals("find all", 20, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",-20,0,T,T), 100).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",-20,T,0,T), 100).scoreDocs;
assertEquals("find all", 20, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",10,20,T,T), 100).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",10,T,20,T), 100).scoreDocs;
assertEquals("find all", 11, result.length);
- result = search.search(q,DocValuesRangeFilter.newIntRange("id_int",-20,-10,T,T), 100).scoreDocs;
+ result = search.search(q,fieldTypes.newDocValuesRangeFilter("id_int",-20,T,-10,T), 100).scoreDocs;
assertEquals("find all", 11, result.length);
reader.close();
dir.close();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java Sun Dec 7 10:52:03 2014
@@ -69,8 +69,8 @@ public class TestMultiValuedNumericRange
if (lower>upper) {
int a=lower; lower=upper; upper=a;
}
- Query cq = new ConstantScoreQuery(fieldTypes.newRangeFilter("asc", format.format(lower), true, format.format(upper), true));
- Query tq = new ConstantScoreQuery(fieldTypes.newRangeFilter("trie", lower, true, upper, true));
+ Query cq = new ConstantScoreQuery(fieldTypes.newStringRangeFilter("asc", format.format(lower), true, format.format(upper), true));
+ Query tq = new ConstantScoreQuery(fieldTypes.newIntRangeFilter("trie", lower, true, upper, true));
TopDocs trTopDocs = searcher.search(cq, 1);
TopDocs nrTopDocs = searcher.search(tq, 1);
assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal", trTopDocs.totalHits, nrTopDocs.totalHits);
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeFilter.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeFilter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeFilter.java Sun Dec 7 10:52:03 2014
@@ -26,6 +26,7 @@ import org.apache.lucene.index.MultiDocV
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
// nocommit more
@@ -39,10 +40,60 @@ public class TestNumericRangeFilter exte
IndexReader r = DirectoryReader.open(w, true);
FieldTypes fieldTypes = r.getFieldTypes();
IndexSearcher s = newSearcher(r);
- Query q = new ConstantScoreQuery(fieldTypes.newRangeFilter("number", -110, true, 400, false));
+ Query q = new ConstantScoreQuery(fieldTypes.newDoubleRangeFilter("number", -110d, true, 400d, false));
assertEquals(1, s.search(q, 1).totalHits);
NumericDocValues ndv = MultiDocValues.getNumericValues(r, "number");
- assertEquals(-103.0, Document.longToDouble(ndv.get(0)), .0000000001);
+ assertEquals(-103.0, NumericUtils.longToDouble(ndv.get(0)), .0000000001);
+ r.close();
+ w.close();
+ dir.close();
+ }
+
+ public void testBasicIntRange() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
+ Document doc = w.newDocument();
+ doc.addInt("number", -103);
+ w.addDocument(doc);
+ doc = w.newDocument();
+ doc.addInt("number", 170);
+ w.addDocument(doc);
+
+ IndexReader r = DirectoryReader.open(w, true);
+ FieldTypes fieldTypes = r.getFieldTypes();
+ IndexSearcher s = newSearcher(r);
+ Query q = new ConstantScoreQuery(fieldTypes.newIntRangeFilter("number", -110, true, 17, false));
+ assertEquals(1, s.search(q, 1).totalHits);
+ NumericDocValues ndv = MultiDocValues.getNumericValues(r, "number");
+ assertEquals(-103, ndv.get(0));
+ r.close();
+ w.close();
+ dir.close();
+ }
+
+ public void testHalfFloatRange() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
+ Document doc = w.newDocument();
+ doc.addHalfFloat("number", -103.0f);
+ w.addDocument(doc);
+
+ doc = w.newDocument();
+ doc.addHalfFloat("number", 17.0f);
+ w.addDocument(doc);
+
+ doc = w.newDocument();
+ doc.addHalfFloat("number", 10000.0f);
+ w.addDocument(doc);
+
+ IndexReader r = DirectoryReader.open(w, true);
+ FieldTypes fieldTypes = r.getFieldTypes();
+ IndexSearcher s = newSearcher(r);
+ assertEquals(1, s.search(new ConstantScoreQuery(fieldTypes.newHalfFloatRangeFilter("number", -10f, true, 20f, false)), 1).totalHits);
+ assertEquals(1, s.search(new ConstantScoreQuery(fieldTypes.newDocValuesRangeFilter("number", -10f, true, 20f, false)), 1).totalHits);
+
+ assertEquals(2, s.search(new ConstantScoreQuery(fieldTypes.newHalfFloatRangeFilter("number", 0f, true, 20000f, false)), 1).totalHits);
+ assertEquals(2, s.search(new ConstantScoreQuery(fieldTypes.newDocValuesRangeFilter("number", 0f, true, 20000f, false)), 1).totalHits);
r.close();
w.close();
dir.close();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java?rev=1643659&r1=1643658&r2=1643659&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java Sun Dec 7 10:52:03 2014
@@ -491,7 +491,7 @@ public class TestTermRangeQuery extends
min[0] = 17;
byte[] max = new byte[1];
max[0] = 18;
- assertEquals(2, s.search(new ConstantScoreQuery(fieldTypes.newRangeFilter("field", min, true, max, true)), 1).totalHits);
+ assertEquals(2, s.search(new ConstantScoreQuery(fieldTypes.newBinaryRangeFilter("field", min, true, max, true)), 1).totalHits);
r.close();
w.close();