You are viewing a plain text version of this content; the canonical (HTML) version, with its hyperlink intact, is available from the mailing-list archive.
Posted to commits@lucene.apache.org by rm...@apache.org on 2016/08/17 13:29:56 UTC
[5/7] lucene-solr:master: LUCENE-7413: move legacy numeric support to
backwards module
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery32.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery32.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery32.java
new file mode 100644
index 0000000..acd0c04
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery32.java
@@ -0,0 +1,461 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestNumericRangeQuery32 extends LuceneTestCase {
+ // distance of entries
+ private static int distance;
+ // shift the starting of the values to the left, to also have negative values:
+ private static final int startOffset = - 1 << 15;
+ // number of docs to generate for testing
+ private static int noDocs;
+
+ private static Directory directory = null;
+ private static IndexReader reader = null;
+ private static IndexSearcher searcher = null;
+
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ noDocs = atLeast(4096);
+ distance = (1 << 30) / noDocs;
+ directory = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
+ newIndexWriterConfig(new MockAnalyzer(random()))
+ .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
+ .setMergePolicy(newLogMergePolicy()));
+
+ final LegacyFieldType storedInt = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED);
+ storedInt.setStored(true);
+ storedInt.freeze();
+
+ final LegacyFieldType storedInt8 = new LegacyFieldType(storedInt);
+ storedInt8.setNumericPrecisionStep(8);
+
+ final LegacyFieldType storedInt4 = new LegacyFieldType(storedInt);
+ storedInt4.setNumericPrecisionStep(4);
+
+ final LegacyFieldType storedInt2 = new LegacyFieldType(storedInt);
+ storedInt2.setNumericPrecisionStep(2);
+
+ final LegacyFieldType storedIntNone = new LegacyFieldType(storedInt);
+ storedIntNone.setNumericPrecisionStep(Integer.MAX_VALUE);
+
+ final LegacyFieldType unstoredInt = LegacyIntField.TYPE_NOT_STORED;
+
+ final LegacyFieldType unstoredInt8 = new LegacyFieldType(unstoredInt);
+ unstoredInt8.setNumericPrecisionStep(8);
+
+ final LegacyFieldType unstoredInt4 = new LegacyFieldType(unstoredInt);
+ unstoredInt4.setNumericPrecisionStep(4);
+
+ final LegacyFieldType unstoredInt2 = new LegacyFieldType(unstoredInt);
+ unstoredInt2.setNumericPrecisionStep(2);
+
+ LegacyIntField
+ field8 = new LegacyIntField("field8", 0, storedInt8),
+ field4 = new LegacyIntField("field4", 0, storedInt4),
+ field2 = new LegacyIntField("field2", 0, storedInt2),
+ fieldNoTrie = new LegacyIntField("field"+Integer.MAX_VALUE, 0, storedIntNone),
+ ascfield8 = new LegacyIntField("ascfield8", 0, unstoredInt8),
+ ascfield4 = new LegacyIntField("ascfield4", 0, unstoredInt4),
+ ascfield2 = new LegacyIntField("ascfield2", 0, unstoredInt2);
+
+ Document doc = new Document();
+ // add fields, that have a distance to test general functionality
+ doc.add(field8); doc.add(field4); doc.add(field2); doc.add(fieldNoTrie);
+ // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+ doc.add(ascfield8); doc.add(ascfield4); doc.add(ascfield2);
+
+ // Add a series of noDocs docs with increasing int values
+ for (int l=0; l<noDocs; l++) {
+ int val=distance*l+startOffset;
+ field8.setIntValue(val);
+ field4.setIntValue(val);
+ field2.setIntValue(val);
+ fieldNoTrie.setIntValue(val);
+
+ val=l-(noDocs/2);
+ ascfield8.setIntValue(val);
+ ascfield4.setIntValue(val);
+ ascfield2.setIntValue(val);
+ writer.addDocument(doc);
+ }
+
+ reader = writer.getReader();
+ searcher=newSearcher(reader);
+ writer.close();
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ searcher = null;
+ reader.close();
+ reader = null;
+ directory.close();
+ directory = null;
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ // set the theoretical maximum term count for 8bit (see docs for the number)
+ // super.tearDown will restore the default
+ BooleanQuery.setMaxClauseCount(3*255*2 + 255);
+ }
+
+ /** test for both constant score and boolean query, the other tests only use the constant score mode */
+ private void testRange(int precisionStep) throws Exception {
+ String field="field"+precisionStep;
+ int count=3000;
+ int lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3);
+ LegacyNumericRangeQuery<Integer> q = LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
+ for (byte i=0; i<2; i++) {
+ TopDocs topDocs;
+ String type;
+ switch (i) {
+ case 0:
+ type = " (constant score filter rewrite)";
+ q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ break;
+ case 1:
+ type = " (constant score boolean rewrite)";
+ q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ break;
+ default:
+ return;
+ }
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count"+type, count, sd.length );
+ Document doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc"+type, 2*distance+startOffset, doc.getField(field).numericValue().intValue());
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc"+type, (1+count)*distance+startOffset, doc.getField(field).numericValue().intValue());
+ }
+ }
+
+ @Test
+ public void testRange_8bit() throws Exception {
+ testRange(8);
+ }
+
+ @Test
+ public void testRange_4bit() throws Exception {
+ testRange(4);
+ }
+
+ @Test
+ public void testRange_2bit() throws Exception {
+ testRange(2);
+ }
+
+ @Test
+ public void testOneMatchQuery() throws Exception {
+ LegacyNumericRangeQuery<Integer> q = LegacyNumericRangeQuery.newIntRange("ascfield8", 8, 1000, 1000, true, true);
+ TopDocs topDocs = searcher.search(q, noDocs);
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", 1, sd.length );
+ }
+
+ private void testLeftOpenRange(int precisionStep) throws Exception {
+ String field="field"+precisionStep;
+ int count=3000;
+ int upper=(count-1)*distance + (distance/3) + startOffset;
+ LegacyNumericRangeQuery<Integer> q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, null, upper, true, true);
+ TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", count, sd.length );
+ Document doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", startOffset, doc.getField(field).numericValue().intValue());
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().intValue());
+
+ q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, null, upper, false, true);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", count, sd.length );
+ doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", startOffset, doc.getField(field).numericValue().intValue());
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().intValue());
+ }
+
+ @Test
+ public void testLeftOpenRange_8bit() throws Exception {
+ testLeftOpenRange(8);
+ }
+
+ @Test
+ public void testLeftOpenRange_4bit() throws Exception {
+ testLeftOpenRange(4);
+ }
+
+ @Test
+ public void testLeftOpenRange_2bit() throws Exception {
+ testLeftOpenRange(2);
+ }
+
+ private void testRightOpenRange(int precisionStep) throws Exception {
+ String field="field"+precisionStep;
+ int count=3000;
+ int lower=(count-1)*distance + (distance/3) +startOffset;
+ LegacyNumericRangeQuery<Integer> q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, true);
+ TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", noDocs-count, sd.length );
+ Document doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().intValue());
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().intValue());
+
+ q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, false);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", noDocs-count, sd.length );
+ doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().intValue() );
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().intValue() );
+ }
+
+ @Test
+ public void testRightOpenRange_8bit() throws Exception {
+ testRightOpenRange(8);
+ }
+
+ @Test
+ public void testRightOpenRange_4bit() throws Exception {
+ testRightOpenRange(4);
+ }
+
+ @Test
+ public void testRightOpenRange_2bit() throws Exception {
+ testRightOpenRange(2);
+ }
+
+ @Test
+ public void testInfiniteValues() throws Exception {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
+ newIndexWriterConfig(new MockAnalyzer(random())));
+ Document doc = new Document();
+ doc.add(new LegacyFloatField("float", Float.NEGATIVE_INFINITY, Field.Store.NO));
+ doc.add(new LegacyIntField("int", Integer.MIN_VALUE, Field.Store.NO));
+ writer.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new LegacyFloatField("float", Float.POSITIVE_INFINITY, Field.Store.NO));
+ doc.add(new LegacyIntField("int", Integer.MAX_VALUE, Field.Store.NO));
+ writer.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new LegacyFloatField("float", 0.0f, Field.Store.NO));
+ doc.add(new LegacyIntField("int", 0, Field.Store.NO));
+ writer.addDocument(doc);
+
+ for (float f : TestLegacyNumericUtils.FLOAT_NANs) {
+ doc = new Document();
+ doc.add(new LegacyFloatField("float", f, Field.Store.NO));
+ writer.addDocument(doc);
+ }
+
+ writer.close();
+
+ IndexReader r = DirectoryReader.open(dir);
+ IndexSearcher s = newSearcher(r);
+
+ Query q= LegacyNumericRangeQuery.newIntRange("int", null, null, true, true);
+ TopDocs topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newIntRange("int", null, null, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newIntRange("int", Integer.MIN_VALUE, Integer.MAX_VALUE, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newIntRange("int", Integer.MIN_VALUE, Integer.MAX_VALUE, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 1, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newFloatRange("float", null, null, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newFloatRange("float", null, null, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newFloatRange("float", Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newFloatRange("float", Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 1, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newFloatRange("float", Float.NaN, Float.NaN, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", TestLegacyNumericUtils.FLOAT_NANs.length, topDocs.scoreDocs.length );
+
+ r.close();
+ dir.close();
+ }
+
+ private void testRangeSplit(int precisionStep) throws Exception {
+ String field="ascfield"+precisionStep;
+ // 10 random tests
+ int num = TestUtil.nextInt(random(), 10, 20);
+ for (int i =0; i< num; i++) {
+ int lower=(int)(random().nextDouble()*noDocs - noDocs/2);
+ int upper=(int)(random().nextDouble()*noDocs - noDocs/2);
+ if (lower>upper) {
+ int a=lower; lower=upper; upper=a;
+ }
+ // test inclusive range
+ Query tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
+ TopDocs tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits );
+ // test exclusive range
+ tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, false);
+ tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to exclusive range length", Math.max(upper-lower-1, 0), tTopDocs.totalHits );
+ // test left exclusive range
+ tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, true);
+ tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits );
+ // test right exclusive range
+ tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, false);
+ tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits );
+ }
+ }
+
+ @Test
+ public void testRangeSplit_8bit() throws Exception {
+ testRangeSplit(8);
+ }
+
+ @Test
+ public void testRangeSplit_4bit() throws Exception {
+ testRangeSplit(4);
+ }
+
+ @Test
+ public void testRangeSplit_2bit() throws Exception {
+ testRangeSplit(2);
+ }
+
+ /** we fake a float test using int2float conversion of LegacyNumericUtils */
+ private void testFloatRange(int precisionStep) throws Exception {
+ final String field="ascfield"+precisionStep;
+ final int lower=-1000, upper=+2000;
+
+ Query tq= LegacyNumericRangeQuery.newFloatRange(field, precisionStep,
+ NumericUtils.sortableIntToFloat(lower), NumericUtils.sortableIntToFloat(upper), true, true);
+ TopDocs tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits );
+ }
+
+ @Test
+ public void testFloatRange_8bit() throws Exception {
+ testFloatRange(8);
+ }
+
+ @Test
+ public void testFloatRange_4bit() throws Exception {
+ testFloatRange(4);
+ }
+
+ @Test
+ public void testFloatRange_2bit() throws Exception {
+ testFloatRange(2);
+ }
+
+ @Test
+ public void testEqualsAndHash() throws Exception {
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test1", 4, 10, 20, true, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test2", 4, 10, 20, false, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test3", 4, 10, 20, true, false));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test4", 4, 10, 20, false, false));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test5", 4, 10, null, true, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test6", 4, null, 20, true, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test7", 4, null, null, true, true));
+ QueryUtils.checkEqual(
+ LegacyNumericRangeQuery.newIntRange("test8", 4, 10, 20, true, true),
+ LegacyNumericRangeQuery.newIntRange("test8", 4, 10, 20, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newIntRange("test9", 4, 10, 20, true, true),
+ LegacyNumericRangeQuery.newIntRange("test9", 8, 10, 20, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newIntRange("test10a", 4, 10, 20, true, true),
+ LegacyNumericRangeQuery.newIntRange("test10b", 4, 10, 20, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newIntRange("test11", 4, 10, 20, true, true),
+ LegacyNumericRangeQuery.newIntRange("test11", 4, 20, 10, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newIntRange("test12", 4, 10, 20, true, true),
+ LegacyNumericRangeQuery.newIntRange("test12", 4, 10, 20, false, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newIntRange("test13", 4, 10, 20, true, true),
+ LegacyNumericRangeQuery.newFloatRange("test13", 4, 10f, 20f, true, true)
+ );
+ // the following produces a hash collision, because Long and Integer have the same hashcode, so only test equality:
+ Query q1 = LegacyNumericRangeQuery.newIntRange("test14", 4, 10, 20, true, true);
+ Query q2 = LegacyNumericRangeQuery.newLongRange("test14", 4, 10L, 20L, true, true);
+ assertFalse(q1.equals(q2));
+ assertFalse(q2.equals(q1));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java
new file mode 100644
index 0000000..b3ce55a
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java
@@ -0,0 +1,490 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestNumericRangeQuery64 extends LuceneTestCase {
+ // distance of entries
+ private static long distance;
+ // shift the starting of the values to the left, to also have negative values:
+ private static final long startOffset = - 1L << 31;
+ // number of docs to generate for testing
+ private static int noDocs;
+
+ private static Directory directory = null;
+ private static IndexReader reader = null;
+ private static IndexSearcher searcher = null;
+
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ noDocs = atLeast(4096);
+ distance = (1L << 60) / noDocs;
+ directory = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
+ newIndexWriterConfig(new MockAnalyzer(random()))
+ .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
+ .setMergePolicy(newLogMergePolicy()));
+
+ final LegacyFieldType storedLong = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED);
+ storedLong.setStored(true);
+ storedLong.freeze();
+
+ final LegacyFieldType storedLong8 = new LegacyFieldType(storedLong);
+ storedLong8.setNumericPrecisionStep(8);
+
+ final LegacyFieldType storedLong4 = new LegacyFieldType(storedLong);
+ storedLong4.setNumericPrecisionStep(4);
+
+ final LegacyFieldType storedLong6 = new LegacyFieldType(storedLong);
+ storedLong6.setNumericPrecisionStep(6);
+
+ final LegacyFieldType storedLong2 = new LegacyFieldType(storedLong);
+ storedLong2.setNumericPrecisionStep(2);
+
+ final LegacyFieldType storedLongNone = new LegacyFieldType(storedLong);
+ storedLongNone.setNumericPrecisionStep(Integer.MAX_VALUE);
+
+ final LegacyFieldType unstoredLong = LegacyLongField.TYPE_NOT_STORED;
+
+ final LegacyFieldType unstoredLong8 = new LegacyFieldType(unstoredLong);
+ unstoredLong8.setNumericPrecisionStep(8);
+
+ final LegacyFieldType unstoredLong6 = new LegacyFieldType(unstoredLong);
+ unstoredLong6.setNumericPrecisionStep(6);
+
+ final LegacyFieldType unstoredLong4 = new LegacyFieldType(unstoredLong);
+ unstoredLong4.setNumericPrecisionStep(4);
+
+ final LegacyFieldType unstoredLong2 = new LegacyFieldType(unstoredLong);
+ unstoredLong2.setNumericPrecisionStep(2);
+
+ LegacyLongField
+ field8 = new LegacyLongField("field8", 0L, storedLong8),
+ field6 = new LegacyLongField("field6", 0L, storedLong6),
+ field4 = new LegacyLongField("field4", 0L, storedLong4),
+ field2 = new LegacyLongField("field2", 0L, storedLong2),
+ fieldNoTrie = new LegacyLongField("field"+Integer.MAX_VALUE, 0L, storedLongNone),
+ ascfield8 = new LegacyLongField("ascfield8", 0L, unstoredLong8),
+ ascfield6 = new LegacyLongField("ascfield6", 0L, unstoredLong6),
+ ascfield4 = new LegacyLongField("ascfield4", 0L, unstoredLong4),
+ ascfield2 = new LegacyLongField("ascfield2", 0L, unstoredLong2);
+
+ Document doc = new Document();
+ // add fields, that have a distance to test general functionality
+ doc.add(field8); doc.add(field6); doc.add(field4); doc.add(field2); doc.add(fieldNoTrie);
+ // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+ doc.add(ascfield8); doc.add(ascfield6); doc.add(ascfield4); doc.add(ascfield2);
+
+ // Add a series of noDocs docs with increasing long values, by updating the fields
+ for (int l=0; l<noDocs; l++) {
+ long val=distance*l+startOffset;
+ field8.setLongValue(val);
+ field6.setLongValue(val);
+ field4.setLongValue(val);
+ field2.setLongValue(val);
+ fieldNoTrie.setLongValue(val);
+
+ val=l-(noDocs/2);
+ ascfield8.setLongValue(val);
+ ascfield6.setLongValue(val);
+ ascfield4.setLongValue(val);
+ ascfield2.setLongValue(val);
+ writer.addDocument(doc);
+ }
+ reader = writer.getReader();
+ searcher=newSearcher(reader);
+ writer.close();
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ searcher = null;
+ reader.close();
+ reader = null;
+ directory.close();
+ directory = null;
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ // set the theoretical maximum term count for 8bit (see docs for the number)
+ // super.tearDown will restore the default
+ BooleanQuery.setMaxClauseCount(7*255*2 + 255);
+ }
+
+ /** test for constant score + boolean query + filter, the other tests only use the constant score mode */
+ private void testRange(int precisionStep) throws Exception {
+ String field="field"+precisionStep;
+ int count=3000;
+ long lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3);
+ LegacyNumericRangeQuery<Long> q = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
+ for (byte i=0; i<2; i++) {
+ TopDocs topDocs;
+ String type;
+ switch (i) {
+ case 0:
+ type = " (constant score filter rewrite)";
+ q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ break;
+ case 1:
+ type = " (constant score boolean rewrite)";
+ q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ break;
+ default:
+ return;
+ }
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count"+type, count, sd.length );
+ Document doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc"+type, 2*distance+startOffset, doc.getField(field).numericValue().longValue() );
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc"+type, (1+count)*distance+startOffset, doc.getField(field).numericValue().longValue() );
+ }
+ }
+
+ @Test
+ public void testRange_8bit() throws Exception {
+ testRange(8);
+ }
+
+ @Test
+ public void testRange_6bit() throws Exception {
+ testRange(6);
+ }
+
+ @Test
+ public void testRange_4bit() throws Exception {
+ testRange(4);
+ }
+
+ @Test
+ public void testRange_2bit() throws Exception {
+ testRange(2);
+ }
+
+ @Test
+ public void testOneMatchQuery() throws Exception {
+ LegacyNumericRangeQuery<Long> q = LegacyNumericRangeQuery.newLongRange("ascfield8", 8, 1000L, 1000L, true, true);
+ TopDocs topDocs = searcher.search(q, noDocs);
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", 1, sd.length );
+ }
+
+ private void testLeftOpenRange(int precisionStep) throws Exception {
+ String field="field"+precisionStep;
+ int count=3000;
+ long upper=(count-1)*distance + (distance/3) + startOffset;
+ LegacyNumericRangeQuery<Long> q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, null, upper, true, true);
+ TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", count, sd.length );
+ Document doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", startOffset, doc.getField(field).numericValue().longValue() );
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().longValue() );
+
+ q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, null, upper, false, true);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", count, sd.length );
+ doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", startOffset, doc.getField(field).numericValue().longValue() );
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().longValue() );
+ }
+
+ @Test
+ public void testLeftOpenRange_8bit() throws Exception {
+ testLeftOpenRange(8);
+ }
+
+ @Test
+ public void testLeftOpenRange_6bit() throws Exception {
+ testLeftOpenRange(6);
+ }
+
+ @Test
+ public void testLeftOpenRange_4bit() throws Exception {
+ testLeftOpenRange(4);
+ }
+
+ @Test
+ public void testLeftOpenRange_2bit() throws Exception {
+ testLeftOpenRange(2);
+ }
+
+ private void testRightOpenRange(int precisionStep) throws Exception {
+ String field="field"+precisionStep;
+ int count=3000;
+ long lower=(count-1)*distance + (distance/3) +startOffset;
+ LegacyNumericRangeQuery<Long> q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, true);
+ TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ ScoreDoc[] sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", noDocs-count, sd.length );
+ Document doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().longValue() );
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().longValue() );
+
+ q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, false);
+ topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
+ sd = topDocs.scoreDocs;
+ assertNotNull(sd);
+ assertEquals("Score doc count", noDocs-count, sd.length );
+ doc=searcher.doc(sd[0].doc);
+ assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().longValue() );
+ doc=searcher.doc(sd[sd.length-1].doc);
+ assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().longValue() );
+ }
+
+ @Test
+ public void testRightOpenRange_8bit() throws Exception {
+ testRightOpenRange(8);
+ }
+
+ @Test
+ public void testRightOpenRange_6bit() throws Exception {
+ testRightOpenRange(6);
+ }
+
+ @Test
+ public void testRightOpenRange_4bit() throws Exception {
+ testRightOpenRange(4);
+ }
+
+ @Test
+ public void testRightOpenRange_2bit() throws Exception {
+ testRightOpenRange(2);
+ }
+
+ @Test
+ public void testInfiniteValues() throws Exception {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
+ newIndexWriterConfig(new MockAnalyzer(random())));
+ Document doc = new Document();
+ doc.add(new LegacyDoubleField("double", Double.NEGATIVE_INFINITY, Field.Store.NO));
+ doc.add(new LegacyLongField("long", Long.MIN_VALUE, Field.Store.NO));
+ writer.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new LegacyDoubleField("double", Double.POSITIVE_INFINITY, Field.Store.NO));
+ doc.add(new LegacyLongField("long", Long.MAX_VALUE, Field.Store.NO));
+ writer.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new LegacyDoubleField("double", 0.0, Field.Store.NO));
+ doc.add(new LegacyLongField("long", 0L, Field.Store.NO));
+ writer.addDocument(doc);
+
+ for (double d : TestLegacyNumericUtils.DOUBLE_NANs) {
+ doc = new Document();
+ doc.add(new LegacyDoubleField("double", d, Field.Store.NO));
+ writer.addDocument(doc);
+ }
+
+ writer.close();
+
+ IndexReader r = DirectoryReader.open(dir);
+ IndexSearcher s = newSearcher(r);
+
+ Query q= LegacyNumericRangeQuery.newLongRange("long", null, null, true, true);
+ TopDocs topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newLongRange("long", null, null, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newLongRange("long", Long.MIN_VALUE, Long.MAX_VALUE, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newLongRange("long", Long.MIN_VALUE, Long.MAX_VALUE, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 1, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newDoubleRange("double", null, null, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newDoubleRange("double", null, null, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newDoubleRange("double", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 3, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newDoubleRange("double", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, false, false);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", 1, topDocs.scoreDocs.length );
+
+ q= LegacyNumericRangeQuery.newDoubleRange("double", Double.NaN, Double.NaN, true, true);
+ topDocs = s.search(q, 10);
+ assertEquals("Score doc count", TestLegacyNumericUtils.DOUBLE_NANs.length, topDocs.scoreDocs.length );
+
+ r.close();
+ dir.close();
+ }
+
+ private void testRangeSplit(int precisionStep) throws Exception {
+ String field="ascfield"+precisionStep;
+ // 10 random tests
+ int num = TestUtil.nextInt(random(), 10, 20);
+ for (int i = 0; i < num; i++) {
+ long lower=(long)(random().nextDouble()*noDocs - noDocs/2);
+ long upper=(long)(random().nextDouble()*noDocs - noDocs/2);
+ if (lower>upper) {
+ long a=lower; lower=upper; upper=a;
+ }
+ // test inclusive range
+ Query tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
+ TopDocs tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits );
+ // test exclusive range
+ tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, false);
+ tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to exclusive range length", Math.max(upper-lower-1, 0), tTopDocs.totalHits );
+ // test left exclusive range
+ tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, true);
+ tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits );
+ // test right exclusive range
+ tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, false);
+ tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits );
+ }
+ }
+
+ @Test
+ public void testRangeSplit_8bit() throws Exception {
+ testRangeSplit(8);
+ }
+
+ @Test
+ public void testRangeSplit_6bit() throws Exception {
+ testRangeSplit(6);
+ }
+
+ @Test
+ public void testRangeSplit_4bit() throws Exception {
+ testRangeSplit(4);
+ }
+
+ @Test
+ public void testRangeSplit_2bit() throws Exception {
+ testRangeSplit(2);
+ }
+
+ /** we fake a double test using long2double conversion of LegacyNumericUtils */
+ private void testDoubleRange(int precisionStep) throws Exception {
+ final String field="ascfield"+precisionStep;
+ final long lower=-1000L, upper=+2000L;
+
+ Query tq= LegacyNumericRangeQuery.newDoubleRange(field, precisionStep,
+ NumericUtils.sortableLongToDouble(lower), NumericUtils.sortableLongToDouble(upper), true, true);
+ TopDocs tTopDocs = searcher.search(tq, 1);
+ assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits );
+ }
+
+ @Test
+ public void testDoubleRange_8bit() throws Exception {
+ testDoubleRange(8);
+ }
+
+ @Test
+ public void testDoubleRange_6bit() throws Exception {
+ testDoubleRange(6);
+ }
+
+ @Test
+ public void testDoubleRange_4bit() throws Exception {
+ testDoubleRange(4);
+ }
+
+ @Test
+ public void testDoubleRange_2bit() throws Exception {
+ testDoubleRange(2);
+ }
+
+ @Test
+ public void testEqualsAndHash() throws Exception {
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test1", 4, 10L, 20L, true, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test2", 4, 10L, 20L, false, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test3", 4, 10L, 20L, true, false));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test4", 4, 10L, 20L, false, false));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test5", 4, 10L, null, true, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test6", 4, null, 20L, true, true));
+ QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test7", 4, null, null, true, true));
+ QueryUtils.checkEqual(
+ LegacyNumericRangeQuery.newLongRange("test8", 4, 10L, 20L, true, true),
+ LegacyNumericRangeQuery.newLongRange("test8", 4, 10L, 20L, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newLongRange("test9", 4, 10L, 20L, true, true),
+ LegacyNumericRangeQuery.newLongRange("test9", 8, 10L, 20L, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newLongRange("test10a", 4, 10L, 20L, true, true),
+ LegacyNumericRangeQuery.newLongRange("test10b", 4, 10L, 20L, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newLongRange("test11", 4, 10L, 20L, true, true),
+ LegacyNumericRangeQuery.newLongRange("test11", 4, 20L, 10L, true, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newLongRange("test12", 4, 10L, 20L, true, true),
+ LegacyNumericRangeQuery.newLongRange("test12", 4, 10L, 20L, false, true)
+ );
+ QueryUtils.checkUnequal(
+ LegacyNumericRangeQuery.newLongRange("test13", 4, 10L, 20L, true, true),
+ LegacyNumericRangeQuery.newFloatRange("test13", 4, 10f, 20f, true, true)
+ );
+ // difference to int range is tested in TestNumericRangeQuery32
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java
new file mode 100644
index 0000000..a507af0
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+
+import org.apache.lucene.util.AttributeImpl;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.legacy.LegacyNumericTokenStream;
+import org.apache.lucene.legacy.LegacyNumericUtils;
+import org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl;
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
+
+@Deprecated
+public class TestNumericTokenStream extends BaseTokenStreamTestCase {
+
+ final long lvalue = random().nextLong();
+ final int ivalue = random().nextInt();
+
+ public void testLongStream() throws Exception {
+ @SuppressWarnings("resource")
+ final LegacyNumericTokenStream stream=new LegacyNumericTokenStream().setLongValue(lvalue);
+ final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
+ assertNotNull(bytesAtt);
+ final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
+ assertNotNull(typeAtt);
+ final LegacyNumericTokenStream.LegacyNumericTermAttribute numericAtt = stream.getAttribute(LegacyNumericTokenStream.LegacyNumericTermAttribute.class);
+ assertNotNull(numericAtt);
+ stream.reset();
+ assertEquals(64, numericAtt.getValueSize());
+ for (int shift=0; shift<64; shift+= LegacyNumericUtils.PRECISION_STEP_DEFAULT) {
+ assertTrue("New token is available", stream.incrementToken());
+ assertEquals("Shift value wrong", shift, numericAtt.getShift());
+ assertEquals("Term is incorrectly encoded", lvalue & ~((1L << shift) - 1L), LegacyNumericUtils.prefixCodedToLong(bytesAtt.getBytesRef()));
+ assertEquals("Term raw value is incorrectly encoded", lvalue & ~((1L << shift) - 1L), numericAtt.getRawValue());
+ assertEquals("Type incorrect", (shift == 0) ? LegacyNumericTokenStream.TOKEN_TYPE_FULL_PREC : LegacyNumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type());
+ }
+ assertFalse("More tokens available", stream.incrementToken());
+ stream.end();
+ stream.close();
+ }
+
+ public void testIntStream() throws Exception {
+ @SuppressWarnings("resource")
+ final LegacyNumericTokenStream stream=new LegacyNumericTokenStream().setIntValue(ivalue);
+ final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
+ assertNotNull(bytesAtt);
+ final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
+ assertNotNull(typeAtt);
+ final LegacyNumericTokenStream.LegacyNumericTermAttribute numericAtt = stream.getAttribute(LegacyNumericTokenStream.LegacyNumericTermAttribute.class);
+ assertNotNull(numericAtt);
+ stream.reset();
+ assertEquals(32, numericAtt.getValueSize());
+ for (int shift=0; shift<32; shift+= LegacyNumericUtils.PRECISION_STEP_DEFAULT) {
+ assertTrue("New token is available", stream.incrementToken());
+ assertEquals("Shift value wrong", shift, numericAtt.getShift());
+ assertEquals("Term is incorrectly encoded", ivalue & ~((1 << shift) - 1), LegacyNumericUtils.prefixCodedToInt(bytesAtt.getBytesRef()));
+ assertEquals("Term raw value is incorrectly encoded", ((long) ivalue) & ~((1L << shift) - 1L), numericAtt.getRawValue());
+ assertEquals("Type incorrect", (shift == 0) ? LegacyNumericTokenStream.TOKEN_TYPE_FULL_PREC : LegacyNumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type());
+ }
+ assertFalse("More tokens available", stream.incrementToken());
+ stream.end();
+ stream.close();
+ }
+
+ public void testNotInitialized() throws Exception {
+ final LegacyNumericTokenStream stream=new LegacyNumericTokenStream();
+
+ expectThrows(IllegalStateException.class, () -> {
+ stream.reset();
+ });
+
+ expectThrows(IllegalStateException.class, () -> {
+ stream.incrementToken();
+ });
+
+ stream.close();
+ }
+
+ public static interface TestAttribute extends CharTermAttribute {}
+ public static class TestAttributeImpl extends CharTermAttributeImpl implements TestAttribute {}
+
+ public void testCTA() throws Exception {
+ final LegacyNumericTokenStream stream=new LegacyNumericTokenStream();
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
+ stream.addAttribute(CharTermAttribute.class);
+ });
+ assertTrue(e.getMessage().startsWith("LegacyNumericTokenStream does not support"));
+
+ e = expectThrows(IllegalArgumentException.class, () -> {
+ stream.addAttribute(TestAttribute.class);
+ });
+ assertTrue(e.getMessage().startsWith("LegacyNumericTokenStream does not support"));
+ stream.close();
+ }
+
+ /** LUCENE-7027 */
+ public void testCaptureStateAfterExhausted() throws Exception {
+ // default precstep
+ try (LegacyNumericTokenStream stream=new LegacyNumericTokenStream()) {
+ // int
+ stream.setIntValue(ivalue);
+ stream.reset();
+ while (stream.incrementToken());
+ stream.captureState();
+ stream.end();
+ stream.captureState();
+ // long
+ stream.setLongValue(lvalue);
+ stream.reset();
+ while (stream.incrementToken());
+ stream.captureState();
+ stream.end();
+ stream.captureState();
+ }
+ // huge precstep
+ try (LegacyNumericTokenStream stream=new LegacyNumericTokenStream(Integer.MAX_VALUE)) {
+ // int
+ stream.setIntValue(ivalue);
+ stream.reset();
+ while (stream.incrementToken());
+ stream.captureState();
+ stream.end();
+ stream.captureState();
+ // long
+ stream.setLongValue(lvalue);
+ stream.reset();
+ while (stream.incrementToken());
+ stream.captureState();
+ stream.end();
+ stream.captureState();
+ }
+ }
+
+ public void testAttributeClone() throws Exception {
+ LegacyNumericTermAttributeImpl att = new LegacyNumericTermAttributeImpl();
+ att.init(lvalue, 64, 8, 0); // set some value, to make getBytesRef() work
+ LegacyNumericTermAttributeImpl copy = assertCloneIsEqual(att);
+ assertNotSame(att.getBytesRef(), copy.getBytesRef());
+ LegacyNumericTermAttributeImpl copy2 = assertCopyIsEqual(att);
+ assertNotSame(att.getBytesRef(), copy2.getBytesRef());
+
+ // LUCENE-7027 test
+ att.init(lvalue, 64, 8, 64); // Exhausted TokenStream -> should return empty BytesRef
+ assertEquals(new BytesRef(), att.getBytesRef());
+ copy = assertCloneIsEqual(att);
+ assertEquals(new BytesRef(), copy.getBytesRef());
+ assertNotSame(att.getBytesRef(), copy.getBytesRef());
+ copy2 = assertCopyIsEqual(att);
+ assertEquals(new BytesRef(), copy2.getBytesRef());
+ assertNotSame(att.getBytesRef(), copy2.getBytesRef());
+ }
+
+ public static <T extends AttributeImpl> T assertCloneIsEqual(T att) {
+ @SuppressWarnings("unchecked")
+ T clone = (T) att.clone();
+ assertEquals("Clone must be equal", att, clone);
+ assertEquals("Clone's hashcode must be equal", att.hashCode(), clone.hashCode());
+ return clone;
+ }
+
+ public static <T extends AttributeImpl> T assertCopyIsEqual(T att) throws Exception {
+ @SuppressWarnings("unchecked")
+ T copy = (T) att.getClass().newInstance();
+ att.copyTo(copy);
+ assertEquals("Copied instance must be equal", att, copy);
+ assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
+ return copy;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java b/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java
deleted file mode 100644
index 19f7d37..0000000
--- a/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java
+++ /dev/null
@@ -1,357 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.analysis;
-
-
-import java.util.Objects;
-
-import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
-import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
-import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
-import org.apache.lucene.util.Attribute;
-import org.apache.lucene.util.AttributeFactory;
-import org.apache.lucene.util.AttributeImpl;
-import org.apache.lucene.util.AttributeReflector;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.LegacyNumericUtils;
-import org.apache.lucene.util.NumericUtils;
-
-/**
- * <b>Expert:</b> This class provides a {@link TokenStream}
- * for indexing numeric values that can be used by {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
- *
- * <p>Note that for simple usage, {@link org.apache.lucene.document.LegacyIntField}, {@link
- * org.apache.lucene.document.LegacyLongField}, {@link org.apache.lucene.document.LegacyFloatField} or {@link org.apache.lucene.document.LegacyDoubleField} is
- * recommended. These fields disable norms and
- * term freqs, as they are not usually needed during
- * searching. If you need to change these settings, you
- * should use this class.
- *
- * <p>Here's an example usage, for an <code>int</code> field:
- *
- * <pre class="prettyprint">
- * FieldType fieldType = new FieldType(TextField.TYPE_NOT_STORED);
- * fieldType.setOmitNorms(true);
- * fieldType.setIndexOptions(IndexOptions.DOCS_ONLY);
- * Field field = new Field(name, new LegacyNumericTokenStream(precisionStep).setIntValue(value), fieldType);
- * document.add(field);
- * </pre>
- *
- * <p>For optimal performance, re-use the TokenStream and Field instance
- * for more than one document:
- *
- * <pre class="prettyprint">
- * LegacyNumericTokenStream stream = new LegacyNumericTokenStream(precisionStep);
- * FieldType fieldType = new FieldType(TextField.TYPE_NOT_STORED);
- * fieldType.setOmitNorms(true);
- * fieldType.setIndexOptions(IndexOptions.DOCS_ONLY);
- * Field field = new Field(name, stream, fieldType);
- * Document document = new Document();
- * document.add(field);
- *
- * for(all documents) {
- * stream.setIntValue(value)
- * writer.addDocument(document);
- * }
- * </pre>
- *
- * <p>This stream is not intended to be used in analyzers;
- * it's more for iterating the different precisions during
- * indexing a specific numeric value.</p>
-
- * <p><b>NOTE</b>: as token streams are only consumed once
- * the document is added to the index, if you index more
- * than one numeric field, use a separate <code>LegacyNumericTokenStream</code>
- * instance for each.</p>
- *
- * <p>See {@link org.apache.lucene.search.LegacyNumericRangeQuery} for more details on the
- * <a
- * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
- * parameter as well as how numeric fields work under the hood.</p>
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- *
- * @since 2.9
- */
-@Deprecated
-public final class LegacyNumericTokenStream extends TokenStream {
-
- /** The full precision token gets this token type assigned. */
- public static final String TOKEN_TYPE_FULL_PREC = "fullPrecNumeric";
-
- /** The lower precision tokens gets this token type assigned. */
- public static final String TOKEN_TYPE_LOWER_PREC = "lowerPrecNumeric";
-
- /** <b>Expert:</b> Use this attribute to get the details of the currently generated token.
- * @lucene.experimental
- * @since 4.0
- */
- public interface LegacyNumericTermAttribute extends Attribute {
- /** Returns current shift value, undefined before first token */
- int getShift();
- /** Returns current token's raw value as {@code long} with all {@link #getShift} applied, undefined before first token */
- long getRawValue();
- /** Returns value size in bits (32 for {@code float}, {@code int}; 64 for {@code double}, {@code long}) */
- int getValueSize();
-
- /** <em>Don't call this method!</em>
- * @lucene.internal */
- void init(long value, int valSize, int precisionStep, int shift);
-
- /** <em>Don't call this method!</em>
- * @lucene.internal */
- void setShift(int shift);
-
- /** <em>Don't call this method!</em>
- * @lucene.internal */
- int incShift();
- }
-
- // just a wrapper to prevent adding CTA
- private static final class NumericAttributeFactory extends AttributeFactory {
- private final AttributeFactory delegate;
-
- NumericAttributeFactory(AttributeFactory delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
- if (CharTermAttribute.class.isAssignableFrom(attClass))
- throw new IllegalArgumentException("LegacyNumericTokenStream does not support CharTermAttribute.");
- return delegate.createAttributeInstance(attClass);
- }
- }
-
- /** Implementation of {@link org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute}.
- * @lucene.internal
- * @since 4.0
- */
- public static final class LegacyNumericTermAttributeImpl extends AttributeImpl implements LegacyNumericTermAttribute,TermToBytesRefAttribute {
- private long value = 0L;
- private int valueSize = 0, shift = 0, precisionStep = 0;
- private BytesRefBuilder bytes = new BytesRefBuilder();
-
- /**
- * Creates, but does not yet initialize this attribute instance
- * @see #init(long, int, int, int)
- */
- public LegacyNumericTermAttributeImpl() {}
-
- @Override
- public BytesRef getBytesRef() {
- assert valueSize == 64 || valueSize == 32;
- if (shift >= valueSize) {
- bytes.clear();
- } else if (valueSize == 64) {
- LegacyNumericUtils.longToPrefixCoded(value, shift, bytes);
- } else {
- LegacyNumericUtils.intToPrefixCoded((int) value, shift, bytes);
- }
- return bytes.get();
- }
-
- @Override
- public int getShift() { return shift; }
- @Override
- public void setShift(int shift) { this.shift = shift; }
- @Override
- public int incShift() {
- return (shift += precisionStep);
- }
-
- @Override
- public long getRawValue() { return value & ~((1L << shift) - 1L); }
- @Override
- public int getValueSize() { return valueSize; }
-
- @Override
- public void init(long value, int valueSize, int precisionStep, int shift) {
- this.value = value;
- this.valueSize = valueSize;
- this.precisionStep = precisionStep;
- this.shift = shift;
- }
-
- @Override
- public void clear() {
- // this attribute has no contents to clear!
- // we keep it untouched as it's fully controlled by outer class.
- }
-
- @Override
- public void reflectWith(AttributeReflector reflector) {
- reflector.reflect(TermToBytesRefAttribute.class, "bytes", getBytesRef());
- reflector.reflect(LegacyNumericTermAttribute.class, "shift", shift);
- reflector.reflect(LegacyNumericTermAttribute.class, "rawValue", getRawValue());
- reflector.reflect(LegacyNumericTermAttribute.class, "valueSize", valueSize);
- }
-
- @Override
- public void copyTo(AttributeImpl target) {
- final LegacyNumericTermAttribute a = (LegacyNumericTermAttribute) target;
- a.init(value, valueSize, precisionStep, shift);
- }
-
- @Override
- public LegacyNumericTermAttributeImpl clone() {
- LegacyNumericTermAttributeImpl t = (LegacyNumericTermAttributeImpl)super.clone();
- // Do a deep clone
- t.bytes = new BytesRefBuilder();
- t.bytes.copyBytes(getBytesRef());
- return t;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(precisionStep, shift, value, valueSize);
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null) return false;
- if (getClass() != obj.getClass()) return false;
- LegacyNumericTermAttributeImpl other = (LegacyNumericTermAttributeImpl) obj;
- if (precisionStep != other.precisionStep) return false;
- if (shift != other.shift) return false;
- if (value != other.value) return false;
- if (valueSize != other.valueSize) return false;
- return true;
- }
- }
-
- /**
- * Creates a token stream for numeric values using the default <code>precisionStep</code>
- * {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16). The stream is not yet initialized,
- * before using set a value using the various set<em>???</em>Value() methods.
- */
- public LegacyNumericTokenStream() {
- this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, LegacyNumericUtils.PRECISION_STEP_DEFAULT);
- }
-
- /**
- * Creates a token stream for numeric values with the specified
- * <code>precisionStep</code>. The stream is not yet initialized,
- * before using set a value using the various set<em>???</em>Value() methods.
- */
- public LegacyNumericTokenStream(final int precisionStep) {
- this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, precisionStep);
- }
-
- /**
- * Expert: Creates a token stream for numeric values with the specified
- * <code>precisionStep</code> using the given
- * {@link org.apache.lucene.util.AttributeFactory}.
- * The stream is not yet initialized,
- * before using set a value using the various set<em>???</em>Value() methods.
- */
- public LegacyNumericTokenStream(AttributeFactory factory, final int precisionStep) {
- super(new NumericAttributeFactory(factory));
- if (precisionStep < 1)
- throw new IllegalArgumentException("precisionStep must be >=1");
- this.precisionStep = precisionStep;
- numericAtt.setShift(-precisionStep);
- }
-
- /**
- * Initializes the token stream with the supplied <code>long</code> value.
- * @param value the value, for which this TokenStream should enumerate tokens.
- * @return this instance, because of this you can use it the following way:
- * <code>new Field(name, new LegacyNumericTokenStream(precisionStep).setLongValue(value))</code>
- */
- public LegacyNumericTokenStream setLongValue(final long value) {
- numericAtt.init(value, valSize = 64, precisionStep, -precisionStep);
- return this;
- }
-
- /**
- * Initializes the token stream with the supplied <code>int</code> value.
- * @param value the value, for which this TokenStream should enumerate tokens.
- * @return this instance, because of this you can use it the following way:
- * <code>new Field(name, new LegacyNumericTokenStream(precisionStep).setIntValue(value))</code>
- */
- public LegacyNumericTokenStream setIntValue(final int value) {
- numericAtt.init(value, valSize = 32, precisionStep, -precisionStep);
- return this;
- }
-
- /**
- * Initializes the token stream with the supplied <code>double</code> value.
- * @param value the value, for which this TokenStream should enumerate tokens.
- * @return this instance, because of this you can use it the following way:
- * <code>new Field(name, new LegacyNumericTokenStream(precisionStep).setDoubleValue(value))</code>
- */
- public LegacyNumericTokenStream setDoubleValue(final double value) {
- numericAtt.init(NumericUtils.doubleToSortableLong(value), valSize = 64, precisionStep, -precisionStep);
- return this;
- }
-
- /**
- * Initializes the token stream with the supplied <code>float</code> value.
- * @param value the value, for which this TokenStream should enumerate tokens.
- * @return this instance, because of this you can use it the following way:
- * <code>new Field(name, new LegacyNumericTokenStream(precisionStep).setFloatValue(value))</code>
- */
- public LegacyNumericTokenStream setFloatValue(final float value) {
- numericAtt.init(NumericUtils.floatToSortableInt(value), valSize = 32, precisionStep, -precisionStep);
- return this;
- }
-
- @Override
- public void reset() {
- if (valSize == 0)
- throw new IllegalStateException("call set???Value() before usage");
- numericAtt.setShift(-precisionStep);
- }
-
- @Override
- public boolean incrementToken() {
- if (valSize == 0)
- throw new IllegalStateException("call set???Value() before usage");
-
- // this will only clear all other attributes in this TokenStream
- clearAttributes();
-
- final int shift = numericAtt.incShift();
- typeAtt.setType((shift == 0) ? TOKEN_TYPE_FULL_PREC : TOKEN_TYPE_LOWER_PREC);
- posIncrAtt.setPositionIncrement((shift == 0) ? 1 : 0);
- return (shift < valSize);
- }
-
- /** Returns the precision step. */
- public int getPrecisionStep() {
- return precisionStep;
- }
-
- @Override
- public String toString() {
- // We override default because it can throw cryptic "illegal shift value":
- return getClass().getSimpleName() + "(precisionStep=" + precisionStep + " valueSize=" + numericAtt.getValueSize() + " shift=" + numericAtt.getShift() + ")";
- }
-
- // members
- private final LegacyNumericTermAttribute numericAtt = addAttribute(LegacyNumericTermAttribute.class);
- private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
- private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
-
- private int valSize = 0; // valSize==0 means not initialized
- private final int precisionStep;
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/core/src/java/org/apache/lucene/document/Field.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/Field.java b/lucene/core/src/java/org/apache/lucene/document/Field.java
index 8798610..8f5f869 100644
--- a/lucene/core/src/java/org/apache/lucene/document/Field.java
+++ b/lucene/core/src/java/org/apache/lucene/document/Field.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -426,9 +425,6 @@ public class Field implements IndexableField {
if (type.indexOptions() == IndexOptions.NONE || !type.tokenized()) {
throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized");
}
- if (type.numericType() != null) {
- throw new IllegalArgumentException("cannot set private TokenStream on numeric fields");
- }
this.tokenStream = tokenStream;
}
@@ -511,35 +507,6 @@ public class Field implements IndexableField {
return null;
}
- final FieldType.LegacyNumericType numericType = fieldType().numericType();
- if (numericType != null) {
- if (!(reuse instanceof LegacyNumericTokenStream && ((LegacyNumericTokenStream)reuse).getPrecisionStep() == type.numericPrecisionStep())) {
- // lazy init the TokenStream as it is heavy to instantiate
- // (attributes,...) if not needed (stored field loading)
- reuse = new LegacyNumericTokenStream(type.numericPrecisionStep());
- }
- final LegacyNumericTokenStream nts = (LegacyNumericTokenStream) reuse;
- // initialize value in TokenStream
- final Number val = (Number) fieldsData;
- switch (numericType) {
- case INT:
- nts.setIntValue(val.intValue());
- break;
- case LONG:
- nts.setLongValue(val.longValue());
- break;
- case FLOAT:
- nts.setFloatValue(val.floatValue());
- break;
- case DOUBLE:
- nts.setDoubleValue(val.doubleValue());
- break;
- default:
- throw new AssertionError("Should never get here");
- }
- return reuse;
- }
-
if (!fieldType().tokenized()) {
if (stringValue() != null) {
if (!(reuse instanceof StringTokenStream)) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/core/src/java/org/apache/lucene/document/FieldType.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/FieldType.java b/lucene/core/src/java/org/apache/lucene/document/FieldType.java
index e0f058f..6f206a4 100644
--- a/lucene/core/src/java/org/apache/lucene/document/FieldType.java
+++ b/lucene/core/src/java/org/apache/lucene/document/FieldType.java
@@ -22,30 +22,12 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.PointValues;
-import org.apache.lucene.util.LegacyNumericUtils;
/**
* Describes the properties of a field.
*/
public class FieldType implements IndexableFieldType {
- /** Data type of the numeric value
- * @since 3.2
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public enum LegacyNumericType {
- /** 32-bit integer numeric type */
- INT,
- /** 64-bit long numeric type */
- LONG,
- /** 32-bit float numeric type */
- FLOAT,
- /** 64-bit double numeric type */
- DOUBLE
- }
-
private boolean stored;
private boolean tokenized = true;
private boolean storeTermVectors;
@@ -54,9 +36,7 @@ public class FieldType implements IndexableFieldType {
private boolean storeTermVectorPayloads;
private boolean omitNorms;
private IndexOptions indexOptions = IndexOptions.NONE;
- private LegacyNumericType numericType;
private boolean frozen;
- private int numericPrecisionStep = LegacyNumericUtils.PRECISION_STEP_DEFAULT;
private DocValuesType docValuesType = DocValuesType.NONE;
private int dimensionCount;
private int dimensionNumBytes;
@@ -73,8 +53,6 @@ public class FieldType implements IndexableFieldType {
this.storeTermVectorPayloads = ref.storeTermVectorPayloads();
this.omitNorms = ref.omitNorms();
this.indexOptions = ref.indexOptions();
- this.numericType = ref.numericType();
- this.numericPrecisionStep = ref.numericPrecisionStep();
this.docValuesType = ref.docValuesType();
this.dimensionCount = ref.dimensionCount;
this.dimensionNumBytes = ref.dimensionNumBytes;
@@ -298,70 +276,6 @@ public class FieldType implements IndexableFieldType {
}
/**
- * Specifies the field's numeric type.
- * @param type numeric type, or null if the field has no numeric type.
- * @throws IllegalStateException if this FieldType is frozen against
- * future modifications.
- * @see #numericType()
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public void setNumericType(LegacyNumericType type) {
- checkIfFrozen();
- numericType = type;
- }
-
- /**
- * LegacyNumericType: if non-null then the field's value will be indexed
- * numerically so that {@link org.apache.lucene.search.LegacyNumericRangeQuery} can be used at
- * search time.
- * <p>
- * The default is <code>null</code> (no numeric type)
- * @see #setNumericType(org.apache.lucene.document.FieldType.LegacyNumericType)
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public LegacyNumericType numericType() {
- return numericType;
- }
-
- /**
- * Sets the numeric precision step for the field.
- * @param precisionStep numeric precision step for the field
- * @throws IllegalArgumentException if precisionStep is less than 1.
- * @throws IllegalStateException if this FieldType is frozen against
- * future modifications.
- * @see #numericPrecisionStep()
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public void setNumericPrecisionStep(int precisionStep) {
- checkIfFrozen();
- if (precisionStep < 1) {
- throw new IllegalArgumentException("precisionStep must be >= 1 (got " + precisionStep + ")");
- }
- this.numericPrecisionStep = precisionStep;
- }
-
- /**
- * Precision step for numeric field.
- * <p>
- * This has no effect if {@link #numericType()} returns null.
- * <p>
- * The default is {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT}
- * @see #setNumericPrecisionStep(int)
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public int numericPrecisionStep() {
- return numericPrecisionStep;
- }
-
- /**
* Enables points indexing.
*/
public void setDimensions(int dimensionCount, int dimensionNumBytes) {
@@ -403,7 +317,7 @@ public class FieldType implements IndexableFieldType {
/** Prints a Field for human consumption. */
@Override
- public final String toString() {
+ public String toString() {
StringBuilder result = new StringBuilder();
if (stored()) {
result.append("stored");
@@ -434,12 +348,6 @@ public class FieldType implements IndexableFieldType {
result.append(",indexOptions=");
result.append(indexOptions);
}
- if (numericType != null) {
- result.append(",numericType=");
- result.append(numericType);
- result.append(",numericPrecisionStep=");
- result.append(numericPrecisionStep);
- }
}
if (dimensionCount != 0) {
if (result.length() > 0) {
@@ -495,8 +403,6 @@ public class FieldType implements IndexableFieldType {
result = prime * result + dimensionNumBytes;
result = prime * result + ((docValuesType == null) ? 0 : docValuesType.hashCode());
result = prime * result + indexOptions.hashCode();
- result = prime * result + numericPrecisionStep;
- result = prime * result + ((numericType == null) ? 0 : numericType.hashCode());
result = prime * result + (omitNorms ? 1231 : 1237);
result = prime * result + (storeTermVectorOffsets ? 1231 : 1237);
result = prime * result + (storeTermVectorPayloads ? 1231 : 1237);
@@ -517,8 +423,6 @@ public class FieldType implements IndexableFieldType {
if (dimensionNumBytes != other.dimensionNumBytes) return false;
if (docValuesType != other.docValuesType) return false;
if (indexOptions != other.indexOptions) return false;
- if (numericPrecisionStep != other.numericPrecisionStep) return false;
- if (numericType != other.numericType) return false;
if (omitNorms != other.omitNorms) return false;
if (storeTermVectorOffsets != other.storeTermVectorOffsets) return false;
if (storeTermVectorPayloads != other.storeTermVectorPayloads) return false;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/105c7eae/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java b/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java
deleted file mode 100644
index 55ba81c..0000000
--- a/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.document;
-
-
-import org.apache.lucene.index.IndexOptions;
-
-
-/**
- * <p>
- * Field that indexes <code>double</code> values
- * for efficient range filtering and sorting. Here's an example usage:
- *
- * <pre class="prettyprint">
- * document.add(new LegacyDoubleField(name, 6.0, Field.Store.NO));
- * </pre>
- *
- * For optimal performance, re-use the <code>LegacyDoubleField</code> and
- * {@link Document} instance for more than one document:
- *
- * <pre class="prettyprint">
- * LegacyDoubleField field = new LegacyDoubleField(name, 0.0, Field.Store.NO);
- * Document document = new Document();
- * document.add(field);
- *
- * for(all documents) {
- * ...
- * field.setDoubleValue(value)
- * writer.addDocument(document);
- * ...
- * }
- * </pre>
- *
- * See also {@link LegacyIntField}, {@link LegacyLongField}, {@link
- * LegacyFloatField}.
- *
- * <p>To perform range querying or filtering against a
- * <code>LegacyDoubleField</code>, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
- * To sort according to a
- * <code>LegacyDoubleField</code>, use the normal numeric sort types, eg
- * {@link org.apache.lucene.search.SortField.Type#DOUBLE}. <code>LegacyDoubleField</code>
- * values can also be loaded directly from {@link org.apache.lucene.index.LeafReader#getNumericDocValues}.</p>
- *
- * <p>You may add the same field name as an <code>LegacyDoubleField</code> to
- * the same document more than once. Range querying and
- * filtering will be the logical OR of all values; so a range query
- * will hit all documents that have at least one value in
- * the range. However sort behavior is not defined. If you need to sort,
- * you should separately index a single-valued <code>LegacyDoubleField</code>.</p>
- *
- * <p>A <code>LegacyDoubleField</code> will consume somewhat more disk space
- * in the index than an ordinary single-valued field.
- * However, for a typical index that includes substantial
- * textual content per document, this increase will likely
- * be in the noise. </p>
- *
- * <p>Within Lucene, each numeric value is indexed as a
- * <em>trie</em> structure, where each term is logically
- * assigned to larger and larger pre-defined brackets (which
- * are simply lower-precision representations of the value).
- * The step size between each successive bracket is called the
- * <code>precisionStep</code>, measured in bits. Smaller
- * <code>precisionStep</code> values result in larger number
- * of brackets, which consumes more disk space in the index
- * but may result in faster range search performance. The
- * default value, 16, was selected for a reasonable tradeoff
- * of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
- * like to change the value. Note that you must also
- * specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
- * For low cardinality fields larger precision steps are good.
- * If the cardinality is < 100, it is fair
- * to use {@link Integer#MAX_VALUE}, which produces one
- * term per value.
- *
- * <p>For more information on the internals of numeric trie
- * indexing, including the <a
- * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
- *
- * <p>If you only need to sort by numeric value, and never
- * run range querying/filtering, you can index using a
- * <code>precisionStep</code> of {@link Integer#MAX_VALUE}.
- * This will minimize disk space consumed. </p>
- *
- * <p>More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
- * class is a wrapper around this token stream type for
- * easier, more intuitive usage.</p>
- *
- * @deprecated Please use {@link DoublePoint} instead
- *
- * @since 2.9
- */
-
-@Deprecated
-public final class LegacyDoubleField extends Field {
-
- /**
- * Type for a LegacyDoubleField that is not stored:
- * normalization factors, frequencies, and positions are omitted.
- */
- public static final FieldType TYPE_NOT_STORED = new FieldType();
- static {
- TYPE_NOT_STORED.setTokenized(true);
- TYPE_NOT_STORED.setOmitNorms(true);
- TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.DOUBLE);
- TYPE_NOT_STORED.freeze();
- }
-
- /**
- * Type for a stored LegacyDoubleField:
- * normalization factors, frequencies, and positions are omitted.
- */
- public static final FieldType TYPE_STORED = new FieldType();
- static {
- TYPE_STORED.setTokenized(true);
- TYPE_STORED.setOmitNorms(true);
- TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_STORED.setNumericType(FieldType.LegacyNumericType.DOUBLE);
- TYPE_STORED.setStored(true);
- TYPE_STORED.freeze();
- }
-
- /** Creates a stored or un-stored LegacyDoubleField with the provided value
- * and default <code>precisionStep</code> {@link
- * org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
- * @param name field name
- * @param value 64-bit double value
- * @param stored Store.YES if the content should also be stored
- * @throws IllegalArgumentException if the field name is null.
- */
- public LegacyDoubleField(String name, double value, Store stored) {
- super(name, stored == Store.YES ? TYPE_STORED : TYPE_NOT_STORED);
- fieldsData = Double.valueOf(value);
- }
-
- /** Expert: allows you to customize the {@link
- * FieldType}.
- * @param name field name
- * @param value 64-bit double value
- * @param type customized field type: must have {@link FieldType#numericType()}
- * of {@link org.apache.lucene.document.FieldType.LegacyNumericType#DOUBLE}.
- * @throws IllegalArgumentException if the field name or type is null, or
- * if the field type does not have a DOUBLE numericType()
- */
- public LegacyDoubleField(String name, double value, FieldType type) {
- super(name, type);
- if (type.numericType() != FieldType.LegacyNumericType.DOUBLE) {
- throw new IllegalArgumentException("type.numericType() must be DOUBLE but got " + type.numericType());
- }
- fieldsData = Double.valueOf(value);
- }
-}