You are viewing a plain text version of this content. The canonical HTML version is available from the Apache mailing-list archives.
Posted to java-commits@lucene.apache.org by us...@apache.org on 2009/06/23 17:42:13 UTC
svn commit: r787723 [2/2] - in /lucene/java/trunk: ./
src/java/org/apache/lucene/analysis/ src/java/org/apache/lucene/document/
src/java/org/apache/lucene/search/
src/java/org/apache/lucene/search/function/
src/java/org/apache/lucene/util/ src/test/org...
Added: lucene/java/trunk/src/test/org/apache/lucene/search/TestFieldCache.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/search/TestFieldCache.java?rev=787723&view=auto
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/search/TestFieldCache.java (added)
+++ lucene/java/trunk/src/test/org/apache/lucene/search/TestFieldCache.java Tue Jun 23 15:42:12 2009
@@ -0,0 +1,118 @@
+package org.apache.lucene.search;
+
+/**
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+import java.io.IOException;
+
+public class TestFieldCache extends LuceneTestCase {
+ protected IndexReader reader;
+ private static final int NUM_DOCS = 1000;
+
+ public TestFieldCache(String s) {
+ super(s);
+ }
+
+ protected void setUp() throws Exception {
+ super.setUp();
+ RAMDirectory directory = new RAMDirectory();
+ IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ long theLong = Long.MAX_VALUE;
+ double theDouble = Double.MAX_VALUE;
+ byte theByte = Byte.MAX_VALUE;
+ short theShort = Short.MAX_VALUE;
+ int theInt = Integer.MAX_VALUE;
+ float theFloat = Float.MAX_VALUE;
+ for (int i = 0; i < NUM_DOCS; i++){
+ Document doc = new Document();
+ doc.add(new Field("theLong", String.valueOf(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.add(new Field("theDouble", String.valueOf(theDouble--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.add(new Field("theByte", String.valueOf(theByte--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.add(new Field("theShort", String.valueOf(theShort--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.add(new Field("theInt", String.valueOf(theInt--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.add(new Field("theFloat", String.valueOf(theFloat--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+ writer.addDocument(doc);
+ }
+ writer.close();
+ reader = IndexReader.open(directory);
+ }
+
+
+ public void test() throws IOException {
+ FieldCache cache = FieldCache.DEFAULT;
+ double [] doubles = cache.getDoubles(reader, "theDouble");
+ assertSame("Second request to cache return same array", doubles, cache.getDoubles(reader, "theDouble"));
+ assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER));
+ assertTrue("doubles Size: " + doubles.length + " is not: " + NUM_DOCS, doubles.length == NUM_DOCS);
+ for (int i = 0; i < doubles.length; i++) {
+ assertTrue(doubles[i] + " does not equal: " + (Double.MAX_VALUE - i), doubles[i] == (Double.MAX_VALUE - i));
+
+ }
+
+ long [] longs = cache.getLongs(reader, "theLong");
+ assertSame("Second request to cache return same array", longs, cache.getLongs(reader, "theLong"));
+ assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.DEFAULT_LONG_PARSER));
+ assertTrue("longs Size: " + longs.length + " is not: " + NUM_DOCS, longs.length == NUM_DOCS);
+ for (int i = 0; i < longs.length; i++) {
+ assertTrue(longs[i] + " does not equal: " + (Long.MAX_VALUE - i), longs[i] == (Long.MAX_VALUE - i));
+
+ }
+
+ byte [] bytes = cache.getBytes(reader, "theByte");
+ assertSame("Second request to cache return same array", bytes, cache.getBytes(reader, "theByte"));
+ assertSame("Second request with explicit parser return same array", bytes, cache.getBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER));
+ assertTrue("bytes Size: " + bytes.length + " is not: " + NUM_DOCS, bytes.length == NUM_DOCS);
+ for (int i = 0; i < bytes.length; i++) {
+ assertTrue(bytes[i] + " does not equal: " + (Byte.MAX_VALUE - i), bytes[i] == (byte) (Byte.MAX_VALUE - i));
+
+ }
+
+ short [] shorts = cache.getShorts(reader, "theShort");
+ assertSame("Second request to cache return same array", shorts, cache.getShorts(reader, "theShort"));
+ assertSame("Second request with explicit parser return same array", shorts, cache.getShorts(reader, "theShort", FieldCache.DEFAULT_SHORT_PARSER));
+ assertTrue("shorts Size: " + shorts.length + " is not: " + NUM_DOCS, shorts.length == NUM_DOCS);
+ for (int i = 0; i < shorts.length; i++) {
+ assertTrue(shorts[i] + " does not equal: " + (Short.MAX_VALUE - i), shorts[i] == (short) (Short.MAX_VALUE - i));
+
+ }
+
+ int [] ints = cache.getInts(reader, "theInt");
+ assertSame("Second request to cache return same array", ints, cache.getInts(reader, "theInt"));
+ assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.DEFAULT_INT_PARSER));
+ assertTrue("ints Size: " + ints.length + " is not: " + NUM_DOCS, ints.length == NUM_DOCS);
+ for (int i = 0; i < ints.length; i++) {
+ assertTrue(ints[i] + " does not equal: " + (Integer.MAX_VALUE - i), ints[i] == (Integer.MAX_VALUE - i));
+
+ }
+
+ float [] floats = cache.getFloats(reader, "theFloat");
+ assertSame("Second request to cache return same array", floats, cache.getFloats(reader, "theFloat"));
+ assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.DEFAULT_FLOAT_PARSER));
+ assertTrue("floats Size: " + floats.length + " is not: " + NUM_DOCS, floats.length == NUM_DOCS);
+ for (int i = 0; i < floats.length; i++) {
+ assertTrue(floats[i] + " does not equal: " + (Float.MAX_VALUE - i), floats[i] == (Float.MAX_VALUE - i));
+
+ }
+ }
+}
\ No newline at end of file
Propchange: lucene/java/trunk/src/test/org/apache/lucene/search/TestFieldCache.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java?rev=787723&r1=787722&r2=787723&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java Tue Jun 23 15:42:12 2009
@@ -19,12 +19,13 @@
import java.util.Random;
-import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.search.SortField;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
@@ -37,15 +38,6 @@
// number of docs to generate for testing
private static final int noDocs = 10000;
- private static Field newField(String name, int precisionStep) {
- NumericTokenStream stream = new NumericTokenStream(precisionStep);
- stream.setUseNewAPI(true);
- Field f=new Field(name, stream);
- f.setOmitTermFreqAndPositions(true);
- f.setOmitNorms(true);
- return f;
- }
-
private static final RAMDirectory directory;
private static final IndexSearcher searcher;
static {
@@ -57,34 +49,31 @@
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(),
true, MaxFieldLength.UNLIMITED);
- Field
- field8 = newField("field8", 8),
- field4 = newField("field4", 4),
- field2 = newField("field2", 2),
- ascfield8 = newField("ascfield8", 8),
- ascfield4 = newField("ascfield4", 4),
- ascfield2 = newField("ascfield2", 2);
+ NumericField
+ field8 = new NumericField("field8", 8, Field.Store.YES, true),
+ field4 = new NumericField("field4", 4, Field.Store.YES, true),
+ field2 = new NumericField("field2", 2, Field.Store.YES, true),
+ ascfield8 = new NumericField("ascfield8", 8, Field.Store.NO, true),
+ ascfield4 = new NumericField("ascfield4", 4, Field.Store.NO, true),
+ ascfield2 = new NumericField("ascfield2", 2, Field.Store.NO, true);
+
+ Document doc = new Document();
+ // add fields, that have a distance to test general functionality
+ doc.add(field8); doc.add(field4); doc.add(field2);
+ // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+ doc.add(ascfield8); doc.add(ascfield4); doc.add(ascfield2);
// Add a series of noDocs docs with increasing int values
for (int l=0; l<noDocs; l++) {
- Document doc=new Document();
- // add fields, that have a distance to test general functionality
int val=distance*l+startOffset;
- doc.add(new Field("value", Integer.toString(val), Field.Store.YES, Field.Index.NO));
- ((NumericTokenStream)field8.tokenStreamValue()).setIntValue(val);
- doc.add(field8);
- ((NumericTokenStream)field4.tokenStreamValue()).setIntValue(val);
- doc.add(field4);
- ((NumericTokenStream)field2.tokenStreamValue()).setIntValue(val);
- doc.add(field2);
- // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+ field8.setIntValue(val);
+ field4.setIntValue(val);
+ field2.setIntValue(val);
+
val=l-(noDocs/2);
- ((NumericTokenStream)ascfield8.tokenStreamValue()).setIntValue(val);
- doc.add(ascfield8);
- ((NumericTokenStream)ascfield4.tokenStreamValue()).setIntValue(val);
- doc.add(ascfield4);
- ((NumericTokenStream)ascfield2.tokenStreamValue()).setIntValue(val);
- doc.add(ascfield2);
+ ascfield8.setIntValue(val);
+ ascfield4.setIntValue(val);
+ ascfield2.setIntValue(val);
writer.addDocument(doc);
}
@@ -136,9 +125,9 @@
assertNotNull(sd);
assertEquals("Score doc count"+type, count, sd.length );
Document doc=searcher.doc(sd[0].doc);
- assertEquals("First doc"+type, 2*distance+startOffset, Integer.parseInt(doc.get("value")) );
+ assertEquals("First doc"+type, 2*distance+startOffset, Integer.parseInt(doc.get(field)) );
doc=searcher.doc(sd[sd.length-1].doc);
- assertEquals("Last doc"+type, (1+count)*distance+startOffset, Integer.parseInt(doc.get("value")) );
+ assertEquals("Last doc"+type, (1+count)*distance+startOffset, Integer.parseInt(doc.get(field)) );
if (i>0) {
assertEquals("Distinct term number is equal for all query types", lastTerms, terms);
}
@@ -174,9 +163,9 @@
assertNotNull(sd);
assertEquals("Score doc count", count, sd.length );
Document doc=searcher.doc(sd[0].doc);
- assertEquals("First doc", startOffset, Integer.parseInt(doc.get("value")) );
+ assertEquals("First doc", startOffset, Integer.parseInt(doc.get(field)) );
doc=searcher.doc(sd[sd.length-1].doc);
- assertEquals("Last doc", (count-1)*distance+startOffset, Integer.parseInt(doc.get("value")) );
+ assertEquals("Last doc", (count-1)*distance+startOffset, Integer.parseInt(doc.get(field)) );
}
public void testLeftOpenRange_8bit() throws Exception {
@@ -202,9 +191,9 @@
assertNotNull(sd);
assertEquals("Score doc count", noDocs-count, sd.length );
Document doc=searcher.doc(sd[0].doc);
- assertEquals("First doc", count*distance+startOffset, Integer.parseInt(doc.get("value")) );
+ assertEquals("First doc", count*distance+startOffset, Integer.parseInt(doc.get(field)) );
doc=searcher.doc(sd[sd.length-1].doc);
- assertEquals("Last doc", (noDocs-1)*distance+startOffset, Integer.parseInt(doc.get("value")) );
+ assertEquals("Last doc", (noDocs-1)*distance+startOffset, Integer.parseInt(doc.get(field)) );
}
public void testRightOpenRange_8bit() throws Exception {
@@ -364,13 +353,13 @@
int a=lower; lower=upper; upper=a;
}
Query tq=NumericRangeQuery.newIntRange(field, precisionStep, new Integer(lower), new Integer(upper), true, true);
- TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(NumericUtils.getIntSortField(field, true)));
+ TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(new SortField(field, SortField.INT, true)));
if (topDocs.totalHits==0) continue;
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);
- int last=Integer.parseInt(searcher.doc(sd[0].doc).get("value"));
+ int last=Integer.parseInt(searcher.doc(sd[0].doc).get(field));
for (int j=1; j<sd.length; j++) {
- int act=Integer.parseInt(searcher.doc(sd[j].doc).get("value"));
+ int act=Integer.parseInt(searcher.doc(sd[j].doc).get(field));
assertTrue("Docs should be sorted backwards", last>act );
last=act;
}
Modified: lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java?rev=787723&r1=787722&r2=787723&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java Tue Jun 23 15:42:12 2009
@@ -19,12 +19,13 @@
import java.util.Random;
-import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.search.SortField;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
@@ -37,15 +38,6 @@
// number of docs to generate for testing
private static final int noDocs = 10000;
- private static Field newField(String name, int precisionStep) {
- NumericTokenStream stream = new NumericTokenStream(precisionStep);
- stream.setUseNewAPI(true);
- Field f=new Field(name, stream);
- f.setOmitTermFreqAndPositions(true);
- f.setOmitNorms(true);
- return f;
- }
-
private static final RAMDirectory directory;
private static final IndexSearcher searcher;
static {
@@ -57,34 +49,31 @@
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(),
true, MaxFieldLength.UNLIMITED);
- Field
- field8 = newField("field8", 8),
- field4 = newField("field4", 4),
- field2 = newField("field2", 2),
- ascfield8 = newField("ascfield8", 8),
- ascfield4 = newField("ascfield4", 4),
- ascfield2 = newField("ascfield2", 2);
+ NumericField
+ field8 = new NumericField("field8", 8, Field.Store.YES, true),
+ field4 = new NumericField("field4", 4, Field.Store.YES, true),
+ field2 = new NumericField("field2", 2, Field.Store.YES, true),
+ ascfield8 = new NumericField("ascfield8", 8, Field.Store.NO, true),
+ ascfield4 = new NumericField("ascfield4", 4, Field.Store.NO, true),
+ ascfield2 = new NumericField("ascfield2", 2, Field.Store.NO, true);
- // Add a series of noDocs docs with increasing long values
+ Document doc = new Document();
+ // add fields, that have a distance to test general functionality
+ doc.add(field8); doc.add(field4); doc.add(field2);
+ // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+ doc.add(ascfield8); doc.add(ascfield4); doc.add(ascfield2);
+
+ // Add a series of noDocs docs with increasing long values, by updating the fields
for (int l=0; l<noDocs; l++) {
- Document doc=new Document();
- // add fields, that have a distance to test general functionality
long val=distance*l+startOffset;
- doc.add(new Field("value", Long.toString(val), Field.Store.YES, Field.Index.NO));
- ((NumericTokenStream)field8.tokenStreamValue()).setLongValue(val);
- doc.add(field8);
- ((NumericTokenStream)field4.tokenStreamValue()).setLongValue(val);
- doc.add(field4);
- ((NumericTokenStream)field2.tokenStreamValue()).setLongValue(val);
- doc.add(field2);
- // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+ field8.setLongValue(val);
+ field4.setLongValue(val);
+ field2.setLongValue(val);
+
val=l-(noDocs/2);
- ((NumericTokenStream)ascfield8.tokenStreamValue()).setLongValue(val);
- doc.add(ascfield8);
- ((NumericTokenStream)ascfield4.tokenStreamValue()).setLongValue(val);
- doc.add(ascfield4);
- ((NumericTokenStream)ascfield2.tokenStreamValue()).setLongValue(val);
- doc.add(ascfield2);
+ ascfield8.setLongValue(val);
+ ascfield4.setLongValue(val);
+ ascfield2.setLongValue(val);
writer.addDocument(doc);
}
@@ -136,9 +125,9 @@
assertNotNull(sd);
assertEquals("Score doc count"+type, count, sd.length );
Document doc=searcher.doc(sd[0].doc);
- assertEquals("First doc"+type, 2*distance+startOffset, Long.parseLong(doc.get("value")) );
+ assertEquals("First doc"+type, 2*distance+startOffset, Long.parseLong(doc.get(field)) );
doc=searcher.doc(sd[sd.length-1].doc);
- assertEquals("Last doc"+type, (1+count)*distance+startOffset, Long.parseLong(doc.get("value")) );
+ assertEquals("Last doc"+type, (1+count)*distance+startOffset, Long.parseLong(doc.get(field)) );
if (i>0) {
assertEquals("Distinct term number is equal for all query types", lastTerms, terms);
}
@@ -174,9 +163,9 @@
assertNotNull(sd);
assertEquals("Score doc count", count, sd.length );
Document doc=searcher.doc(sd[0].doc);
- assertEquals("First doc", startOffset, Long.parseLong(doc.get("value")) );
+ assertEquals("First doc", startOffset, Long.parseLong(doc.get(field)) );
doc=searcher.doc(sd[sd.length-1].doc);
- assertEquals("Last doc", (count-1)*distance+startOffset, Long.parseLong(doc.get("value")) );
+ assertEquals("Last doc", (count-1)*distance+startOffset, Long.parseLong(doc.get(field)) );
}
public void testLeftOpenRange_8bit() throws Exception {
@@ -202,9 +191,9 @@
assertNotNull(sd);
assertEquals("Score doc count", noDocs-count, sd.length );
Document doc=searcher.doc(sd[0].doc);
- assertEquals("First doc", count*distance+startOffset, Long.parseLong(doc.get("value")) );
+ assertEquals("First doc", count*distance+startOffset, Long.parseLong(doc.get(field)) );
doc=searcher.doc(sd[sd.length-1].doc);
- assertEquals("Last doc", (noDocs-1)*distance+startOffset, Long.parseLong(doc.get("value")) );
+ assertEquals("Last doc", (noDocs-1)*distance+startOffset, Long.parseLong(doc.get(field)) );
}
public void testRightOpenRange_8bit() throws Exception {
@@ -364,13 +353,13 @@
long a=lower; lower=upper; upper=a;
}
Query tq=NumericRangeQuery.newLongRange(field, precisionStep, new Long(lower), new Long(upper), true, true);
- TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(NumericUtils.getLongSortField(field, true)));
+ TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(new SortField(field, SortField.LONG, true)));
if (topDocs.totalHits==0) continue;
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);
- long last=Long.parseLong(searcher.doc(sd[0].doc).get("value"));
+ long last=Long.parseLong(searcher.doc(sd[0].doc).get(field));
for (int j=1; j<sd.length; j++) {
- long act=Long.parseLong(searcher.doc(sd[j].doc).get("value"));
+ long act=Long.parseLong(searcher.doc(sd[j].doc).get(field));
assertTrue("Docs should be sorted backwards", last>act );
last=act;
}
Modified: lucene/java/trunk/src/test/org/apache/lucene/search/TestSort.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/search/TestSort.java?rev=787723&r1=787722&r2=787723&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/search/TestSort.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/search/TestSort.java Tue Jun 23 15:42:12 2009
@@ -338,14 +338,14 @@
}), SortField.FIELD_DOC });
assertMatches (full, queryA, sort, "JIHGFEDCBA");
- sort.setSort (new SortField[] { new SortField ("parser", new ExtendedFieldCache.LongParser(){
+ sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.LongParser(){
public final long parseLong(final String val) {
return (val.charAt(0)-'A') * 1234567890L;
}
}), SortField.FIELD_DOC });
assertMatches (full, queryA, sort, "JIHGFEDCBA");
- sort.setSort (new SortField[] { new SortField ("parser", new ExtendedFieldCache.DoubleParser(){
+ sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.DoubleParser(){
public final double parseDouble(final String val) {
return Math.pow( val.charAt(0), (val.charAt(0)-'A') );
}