You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by jp...@apache.org on 2015/02/27 19:09:02 UTC
svn commit: r1662774 [2/2] - in /lucene/dev/trunk: lucene/
lucene/core/src/java/org/apache/lucene/index/
lucene/core/src/java/org/apache/lucene/search/
lucene/core/src/java/org/apache/lucene/util/
lucene/core/src/java/org/apache/lucene/util/packed/ luc...
Added: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java?rev=1662774&view=auto
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java (added)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java Fri Feb 27 18:09:01 2015
@@ -0,0 +1,822 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.RamUsageTester;
+
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+
+public class TestLRUQueryCache extends LuceneTestCase {
+
+ // Caching policy shared by the tests below: it records nothing on use and
+ // refuses to cache any query, so searches performed under this policy must
+ // never add entries to the cache.
+ private static final QueryCachingPolicy NEVER_CACHE = new QueryCachingPolicy() {
+
+ @Override
+ public void onUse(Query query) {}
+
+ @Override
+ public boolean shouldCache(Query query, LeafReaderContext context) throws IOException {
+ return false;
+ }
+
+ };
+
+ // Sanity-checks that LRUQueryCache.QUERY_DEFAULT_RAM_BYTES_USED is in the
+ // right ballpark of what RamUsageTester measures for a simple TermQuery
+ // (within a 50% tolerance, see delta below).
+ public void testFilterRamBytesUsed() {
+ final Query simpleQuery = new TermQuery(new Term("some_field", "some_term"));
+ final long actualRamBytesUsed = RamUsageTester.sizeOf(simpleQuery);
+ final long ramBytesUsed = LRUQueryCache.QUERY_DEFAULT_RAM_BYTES_USED;
+ // we cannot assert exactly that the constant is correct since actual
+ // memory usage depends on JVM implementations and settings (eg. UseCompressedOops)
+ assertEquals(actualRamBytesUsed, ramBytesUsed, actualRamBytesUsed / 2);
+ }
+
+ // Stress test: one thread indexes, deletes and refreshes while two searcher
+ // threads query through a shared LRUQueryCache. Cached and uncached searches
+ // must agree on hit counts, and the cache must remain internally consistent
+ // both while readers are open and after everything is closed.
+ public void testConcurrency() throws Throwable {
+ final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(20), 1 + random().nextInt(10000));
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+ final SearcherFactory searcherFactory = new SearcherFactory() {
+ @Override
+ public IndexSearcher newSearcher(IndexReader reader) throws IOException {
+ IndexSearcher searcher = new IndexSearcher(reader);
+ searcher.setQueryCachingPolicy(MAYBE_CACHE_POLICY);
+ searcher.setQueryCache(queryCache);
+ return searcher;
+ }
+ };
+ final boolean applyDeletes = random().nextBoolean();
+ final SearcherManager mgr = new SearcherManager(w.w, applyDeletes, searcherFactory);
+ final AtomicBoolean indexing = new AtomicBoolean(true);
+ // first Throwable seen by any thread; rethrown at the end of the test
+ final AtomicReference<Throwable> error = new AtomicReference<>();
+ final int numDocs = atLeast(10000);
+ Thread[] threads = new Thread[3];
+ // threads[0]: indexing thread — adds docs, periodically refreshes the
+ // SearcherManager and occasionally clears the cache or deletes by term
+ threads[0] = new Thread() {
+ public void run() {
+ Document doc = new Document();
+ StringField f = new StringField("color", "", Store.NO);
+ doc.add(f);
+ for (int i = 0; indexing.get() && i < numDocs; ++i) {
+ f.setStringValue(RandomPicks.randomFrom(random(), new String[] {"blue", "red", "yellow"}));
+ try {
+ w.addDocument(doc);
+ if ((i & 63) == 0) {
+ mgr.maybeRefresh();
+ if (rarely()) {
+ queryCache.clear();
+ }
+ if (rarely()) {
+ final String color = RandomPicks.randomFrom(random(), new String[] {"blue", "red", "yellow"});
+ w.deleteDocuments(new Term("color", color));
+ }
+ }
+ } catch (Throwable t) {
+ error.compareAndSet(null, t);
+ break;
+ }
+ }
+ indexing.set(false);
+ }
+ };
+ // threads[1..2]: searcher threads — compare a collector-based count (may
+ // be served from the cache) against a top-docs count (never cached)
+ for (int i = 1; i < threads.length; ++i) {
+ threads[i] = new Thread() {
+ @Override
+ public void run() {
+ while (indexing.get()) {
+ try {
+ final IndexSearcher searcher = mgr.acquire();
+ try {
+ final String value = RandomPicks.randomFrom(random(), new String[] {"blue", "red", "yellow", "green"});
+ final Query q = new TermQuery(new Term("color", value));
+ TotalHitCountCollector collector = new TotalHitCountCollector();
+ searcher.search(q, collector); // will use the cache
+ final int totalHits1 = collector.getTotalHits();
+ final int totalHits2 = searcher.search(q, 1).totalHits; // will not use the cache because of scores
+ // NOTE(review): expected/actual are swapped vs the usual
+ // assertEquals(expected, actual) convention; harmless here
+ // since the assertion is symmetric.
+ assertEquals(totalHits2, totalHits1);
+ } finally {
+ mgr.release(searcher);
+ }
+ } catch (Throwable t) {
+ error.compareAndSet(null, t);
+ }
+ }
+ }
+ };
+ }
+
+ for (Thread thread : threads) {
+ thread.start();
+ }
+
+ for (Thread thread : threads) {
+ thread.join();
+ }
+
+ // rethrow the first failure observed by any of the three threads
+ if (error.get() != null) {
+ throw error.get();
+ }
+ queryCache.assertConsistent();
+ mgr.close();
+ w.close();
+ dir.close();
+ // the cache must stay consistent after all readers have been closed
+ queryCache.assertConsistent();
+ }
+
+ // Verifies LRU semantics with a max size of 2 cached queries:
+ // cachedQueries() lists entries in least-recently-used-first order, a cache
+ // hit moves the entry to the most-recently-used end, and caching a third
+ // query evicts the current LRU entry. Searches under NEVER_CACHE must not
+ // change the cache contents.
+ public void testLRUEviction() throws Exception {
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+ Document doc = new Document();
+ StringField f = new StringField("color", "blue", Store.NO);
+ doc.add(f);
+ w.addDocument(doc);
+ f.setStringValue("red");
+ w.addDocument(doc);
+ f.setStringValue("green");
+ w.addDocument(doc);
+ final DirectoryReader reader = w.getReader();
+ final IndexSearcher searcher = newSearcher(reader);
+ final LRUQueryCache queryCache = new LRUQueryCache(2, 100000);
+
+ final Query blue = new TermQuery(new Term("color", "blue"));
+ final Query red = new TermQuery(new Term("color", "red"));
+ final Query green = new TermQuery(new Term("color", "green"));
+
+ assertEquals(Collections.emptyList(), queryCache.cachedQueries());
+
+ searcher.setQueryCache(queryCache);
+ // the filter is not cached on any segment: no changes
+ searcher.setQueryCachingPolicy(NEVER_CACHE);
+ searcher.search(new ConstantScoreQuery(green), 1);
+ assertEquals(Collections.emptyList(), queryCache.cachedQueries());
+
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+ searcher.search(new ConstantScoreQuery(red), 1);
+ assertEquals(Collections.singletonList(red), queryCache.cachedQueries());
+
+ searcher.search(new ConstantScoreQuery(green), 1);
+ assertEquals(Arrays.asList(red, green), queryCache.cachedQueries());
+
+ // a hit on "red" promotes it to the most-recently-used position
+ searcher.search(new ConstantScoreQuery(red), 1);
+ assertEquals(Arrays.asList(green, red), queryCache.cachedQueries());
+
+ // caching "blue" evicts the LRU entry ("green")
+ searcher.search(new ConstantScoreQuery(blue), 1);
+ assertEquals(Arrays.asList(red, blue), queryCache.cachedQueries());
+
+ searcher.search(new ConstantScoreQuery(blue), 1);
+ assertEquals(Arrays.asList(red, blue), queryCache.cachedQueries());
+
+ searcher.search(new ConstantScoreQuery(green), 1);
+ assertEquals(Arrays.asList(blue, green), queryCache.cachedQueries());
+
+ // under NEVER_CACHE, searching "red" again leaves the cache untouched
+ searcher.setQueryCachingPolicy(NEVER_CACHE);
+ searcher.search(new ConstantScoreQuery(red), 1);
+ assertEquals(Arrays.asList(blue, green), queryCache.cachedQueries());
+
+ reader.close();
+ w.close();
+ dir.close();
+ }
+
+ // Verifies that clearQuery removes a cached entry when given an equal query
+ // instance: query2 is a different object with a different boost than the
+ // cached query1, yet clearing with it empties the cache.
+ public void testClearFilter() throws IOException {
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+ Document doc = new Document();
+ StringField f = new StringField("color", "", Store.NO);
+ doc.add(f);
+ final int numDocs = atLeast(10);
+ for (int i = 0; i < numDocs; ++i) {
+ f.setStringValue(random().nextBoolean() ? "red" : "blue");
+ w.addDocument(doc);
+ }
+ final DirectoryReader reader = w.getReader();
+ final IndexSearcher searcher = newSearcher(reader);
+
+ final Query query1 = new TermQuery(new Term("color", "blue"));
+ query1.setBoost(random().nextFloat());
+ // different instance yet equal
+ final Query query2 = new TermQuery(new Term("color", "blue"));
+ query2.setBoost(random().nextFloat());
+
+ final LRUQueryCache queryCache = new LRUQueryCache(Integer.MAX_VALUE, Long.MAX_VALUE);
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+
+ searcher.search(new ConstantScoreQuery(query1), 1);
+ assertEquals(1, queryCache.cachedQueries().size());
+
+ // clearing with the equal (differently-boosted) instance removes the entry
+ queryCache.clearQuery(query2);
+
+ assertTrue(queryCache.cachedQueries().isEmpty());
+ queryCache.assertConsistent();
+
+ reader.close();
+ w.close();
+ dir.close();
+ }
+
+ // This test makes sure that by making the same assumptions as LRUQueryCache, RAMUsageTester
+ // computes the same memory usage.
+ public void testRamBytesUsedAgreesWithRamUsageTester() throws IOException {
+ final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(10000));
+ // an accumulator that only sums up memory usage of referenced filters and doc id sets
+ final RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() {
+ @Override
+ public long accumulateObject(Object o, long shallowSize, Map<Field,Object> fieldValues, Collection<Object> queue) {
+ // count DocIdSets and Queries exactly the way the cache itself does
+ if (o instanceof DocIdSet) {
+ return ((DocIdSet) o).ramBytesUsed();
+ }
+ if (o instanceof Query) {
+ return queryCache.ramBytesUsed((Query) o);
+ }
+ if (o.getClass().getSimpleName().equals("SegmentCoreReaders")) {
+ // do not take core cache keys into account
+ return 0;
+ }
+ if (o instanceof Map) {
+ // enqueue keys/values for traversal but charge the map itself using
+ // the same per-entry constants that LRUQueryCache assumes
+ Map<?,?> map = (Map<?,?>) o;
+ queue.addAll(map.keySet());
+ queue.addAll(map.values());
+ final long sizePerEntry = o instanceof LinkedHashMap
+ ? LRUQueryCache.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY
+ : LRUQueryCache.HASHTABLE_RAM_BYTES_PER_ENTRY;
+ return sizePerEntry * map.size();
+ }
+ // follow links to other objects, but ignore their memory usage
+ super.accumulateObject(o, shallowSize, fieldValues, queue);
+ return 0;
+ }
+ @Override
+ public long accumulateArray(Object array, long shallowSize, List<Object> values, Collection<Object> queue) {
+ // follow links to other objects, but ignore their memory usage
+ super.accumulateArray(array, shallowSize, values, queue);
+ return 0;
+ }
+ };
+
+ Directory dir = newDirectory();
+ // serial merges so that segments do not get closed while we are measuring ram usage
+ // with RamUsageTester
+ IndexWriterConfig iwc = newIndexWriterConfig().setMergeScheduler(new SerialMergeScheduler());
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+ final List<String> colors = Arrays.asList("blue", "red", "green", "yellow");
+
+ Document doc = new Document();
+ StringField f = new StringField("color", "", Store.NO);
+ doc.add(f);
+ final int iters = atLeast(5);
+ for (int iter = 0; iter < iters; ++iter) {
+ final int numDocs = atLeast(10);
+ for (int i = 0; i < numDocs; ++i) {
+ f.setStringValue(RandomPicks.randomFrom(random(), colors));
+ w.addDocument(doc);
+ }
+ try (final DirectoryReader reader = w.getReader()) {
+ final IndexSearcher searcher = newSearcher(reader);
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(MAYBE_CACHE_POLICY);
+ for (int i = 0; i < 3; ++i) {
+ final Query query = new TermQuery(new Term("color", RandomPicks.randomFrom(random(), colors)));
+ searcher.search(new ConstantScoreQuery(query), 1);
+ }
+ }
+ queryCache.assertConsistent();
+ // the cache's own estimate must match the independent measurement
+ assertEquals(RamUsageTester.sizeOf(queryCache, acc), queryCache.ramBytesUsed());
+ }
+
+ w.close();
+ dir.close();
+ }
+
+ /** A query that doesn't match anything */
+ private static class DummyQuery extends Query {
+
+ // each instance gets a unique id, so two DummyQuery instances are never
+ // equal and never share a cache entry — useful for filling the cache with
+ // many distinct tiny entries
+ private static int COUNTER = 0;
+ private final int id;
+
+ DummyQuery() {
+ id = COUNTER++;
+ }
+
+ @Override
+ public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+ return new ConstantScoreWeight(this) {
+ @Override
+ Scorer scorer(LeafReaderContext context, Bits acceptDocs, float score) throws IOException {
+ // null scorer: matches no documents
+ return null;
+ }
+ };
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj instanceof DummyQuery == false) {
+ return false;
+ }
+ return id == ((DummyQuery) obj).id;
+ }
+
+ @Override
+ public int hashCode() {
+ return id;
+ }
+
+ @Override
+ public String toString(String field) {
+ return "DummyQuery";
+ }
+
+ }
+
+ // Test what happens when the cache contains only filters and doc id sets
+ // that require very little memory. In that case most of the memory is taken
+ // by the cache itself, not cache entries, and we want to make sure that
+ // memory usage is not grossly underestimated.
+ public void testRamBytesUsedConstantEntryOverhead() throws IOException {
+ final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000);
+
+ // measure like the cache does for queries/doc id sets, but (unlike the
+ // accumulator in testRamBytesUsedAgreesWithRamUsageTester) fully count the
+ // cache's own bookkeeping structures
+ final RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() {
+ @Override
+ public long accumulateObject(Object o, long shallowSize, Map<Field,Object> fieldValues, Collection<Object> queue) {
+ if (o instanceof DocIdSet) {
+ return ((DocIdSet) o).ramBytesUsed();
+ }
+ if (o instanceof Query) {
+ return queryCache.ramBytesUsed((Query) o);
+ }
+ if (o.getClass().getSimpleName().equals("SegmentCoreReaders")) {
+ // do not follow references to core cache keys
+ return 0;
+ }
+ return super.accumulateObject(o, shallowSize, fieldValues, queue);
+ }
+ };
+
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ final int numDocs = atLeast(100);
+ for (int i = 0; i < numDocs; ++i) {
+ w.addDocument(doc);
+ }
+ final DirectoryReader reader = w.getReader();
+ final IndexSearcher searcher = new IndexSearcher(reader);
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+
+ // cache many distinct no-match queries so per-entry overhead dominates
+ final int numQueries = atLeast(1000);
+ for (int i = 0; i < numQueries; ++i) {
+ final Query query = new DummyQuery();
+ searcher.search(new ConstantScoreQuery(query), 1);
+ }
+ assertTrue(queryCache.getCacheCount() > 0);
+
+ final long actualRamBytesUsed = RamUsageTester.sizeOf(queryCache, acc);
+ final long expectedRamBytesUsed = queryCache.ramBytesUsed();
+ // error < 30%
+ assertEquals(actualRamBytesUsed, expectedRamBytesUsed, 30 * actualRamBytesUsed / 100);
+
+ reader.close();
+ w.close();
+ dir.close();
+ }
+
+ // Verifies that QueryCachingPolicy.onUse is called exactly once per
+ // top-level search of a cacheable query: the counts recorded by the policy
+ // must match the counts of searches actually issued.
+ public void testOnUse() throws IOException {
+ final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(1000));
+
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+ Document doc = new Document();
+ StringField f = new StringField("color", "", Store.NO);
+ doc.add(f);
+ final int numDocs = atLeast(10);
+ for (int i = 0; i < numDocs; ++i) {
+ f.setStringValue(RandomPicks.randomFrom(random(), Arrays.asList("red", "blue", "green", "yellow")));
+ w.addDocument(doc);
+ if (random().nextBoolean()) {
+ // randomly create multiple segments
+ w.getReader().close();
+ }
+ }
+ final DirectoryReader reader = w.getReader();
+ final IndexSearcher searcher = new IndexSearcher(reader);
+
+ final Map<Query, Integer> actualCounts = new HashMap<>();
+ final Map<Query, Integer> expectedCounts = new HashMap<>();
+
+ // a policy that caches randomly and counts every onUse notification
+ final QueryCachingPolicy countingPolicy = new QueryCachingPolicy() {
+
+ @Override
+ public boolean shouldCache(Query query, LeafReaderContext context) throws IOException {
+ return random().nextBoolean();
+ }
+
+ @Override
+ public void onUse(Query query) {
+ expectedCounts.put(query, 1 + expectedCounts.getOrDefault(query, 0));
+ }
+ };
+
+ Query[] queries = new Query[10 + random().nextInt(10)];
+ for (int i = 0; i < queries.length; ++i) {
+ queries[i] = new TermQuery(new Term("color", RandomPicks.randomFrom(random(), Arrays.asList("red", "blue", "green", "yellow"))));
+ queries[i].setBoost(random().nextFloat());
+ }
+
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(countingPolicy);
+ for (int i = 0; i < 20; ++i) {
+ final int idx = random().nextInt(queries.length);
+ searcher.search(new ConstantScoreQuery(queries[idx]), 1);
+ actualCounts.put(queries[idx], 1 + actualCounts.getOrDefault(queries[idx], 0));
+ }
+
+ assertEquals(actualCounts, expectedCounts);
+
+ reader.close();
+ w.close();
+ dir.close();
+ }
+
+ // Exercises the global cache statistics (total/hit/miss/cache/eviction
+ // counts and cache size) through four phases — uncached lookups, caching
+ // lookups, cached-only lookups, and eviction via a second query on a
+ // size-1 cache — and finally after reader close. All stats are per segment,
+ // hence the segmentCount multipliers.
+ public void testStats() throws IOException {
+ final LRUQueryCache queryCache = new LRUQueryCache(1, 10000000);
+
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+ final List<String> colors = Arrays.asList("blue", "red", "green", "yellow");
+
+ Document doc = new Document();
+ StringField f = new StringField("color", "", Store.NO);
+ doc.add(f);
+ for (int i = 0; i < 10; ++i) {
+ f.setStringValue(RandomPicks.randomFrom(random(), colors));
+ w.addDocument(doc);
+ if (random().nextBoolean()) {
+ // randomly create multiple segments
+ w.getReader().close();
+ }
+ }
+
+ final DirectoryReader reader = w.getReader();
+ final int segmentCount = reader.leaves().size();
+ final IndexSearcher searcher = new IndexSearcher(reader);
+ final Query query = new TermQuery(new Term("color", "red"));
+ final Query query2 = new TermQuery(new Term("color", "blue"));
+
+ searcher.setQueryCache(queryCache);
+ // first pass, lookups without caching that all miss
+ searcher.setQueryCachingPolicy(NEVER_CACHE);
+ for (int i = 0; i < 10; ++i) {
+ searcher.search(new ConstantScoreQuery(query), 1);
+ }
+ assertEquals(10 * segmentCount, queryCache.getTotalCount());
+ assertEquals(0, queryCache.getHitCount());
+ assertEquals(10 * segmentCount, queryCache.getMissCount());
+ assertEquals(0, queryCache.getCacheCount());
+ assertEquals(0, queryCache.getEvictionCount());
+ assertEquals(0, queryCache.getCacheSize());
+
+ // second pass, lookups + caching, only the first one is a miss
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+ for (int i = 0; i < 10; ++i) {
+ searcher.search(new ConstantScoreQuery(query), 1);
+ }
+ assertEquals(20 * segmentCount, queryCache.getTotalCount());
+ assertEquals(9 * segmentCount, queryCache.getHitCount());
+ assertEquals(11 * segmentCount, queryCache.getMissCount());
+ assertEquals(1 * segmentCount, queryCache.getCacheCount());
+ assertEquals(0, queryCache.getEvictionCount());
+ assertEquals(1 * segmentCount, queryCache.getCacheSize());
+
+ // third pass lookups without caching, we only have hits
+ searcher.setQueryCachingPolicy(NEVER_CACHE);
+ for (int i = 0; i < 10; ++i) {
+ searcher.search(new ConstantScoreQuery(query), 1);
+ }
+ assertEquals(30 * segmentCount, queryCache.getTotalCount());
+ assertEquals(19 * segmentCount, queryCache.getHitCount());
+ assertEquals(11 * segmentCount, queryCache.getMissCount());
+ assertEquals(1 * segmentCount, queryCache.getCacheCount());
+ assertEquals(0, queryCache.getEvictionCount());
+ assertEquals(1 * segmentCount, queryCache.getCacheSize());
+
+ // fourth pass with a different filter which will trigger evictions since the size is 1
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+ for (int i = 0; i < 10; ++i) {
+ searcher.search(new ConstantScoreQuery(query2), 1);
+ }
+ assertEquals(40 * segmentCount, queryCache.getTotalCount());
+ assertEquals(28 * segmentCount, queryCache.getHitCount());
+ assertEquals(12 * segmentCount, queryCache.getMissCount());
+ assertEquals(2 * segmentCount, queryCache.getCacheCount());
+ assertEquals(1 * segmentCount, queryCache.getEvictionCount());
+ assertEquals(1 * segmentCount, queryCache.getCacheSize());
+
+ // now close, causing evictions due to the closing of segment cores
+ reader.close();
+ w.close();
+ assertEquals(40 * segmentCount, queryCache.getTotalCount());
+ assertEquals(28 * segmentCount, queryCache.getHitCount());
+ assertEquals(12 * segmentCount, queryCache.getMissCount());
+ assertEquals(2 * segmentCount, queryCache.getCacheCount());
+ assertEquals(2 * segmentCount, queryCache.getEvictionCount());
+ assertEquals(0, queryCache.getCacheSize());
+
+ dir.close();
+ }
+
+ // Exercises the protected per-event callbacks (onHit/onMiss/onQueryCache/
+ // onQueryEviction/onDocIdSetCache/onDocIdSetEviction/onClear) by sharing one
+ // cache across two indexes: per-index hit/miss counters are attributed via
+ // the reader core cache key, and ram usage / cache size recomputed from the
+ // callbacks must match the cache's own reported values.
+ public void testFineGrainedStats() throws IOException {
+ Directory dir1 = newDirectory();
+ final RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1);
+ Directory dir2 = newDirectory();
+ final RandomIndexWriter w2 = new RandomIndexWriter(random(), dir2);
+
+ final List<String> colors = Arrays.asList("blue", "red", "green", "yellow");
+
+ Document doc = new Document();
+ StringField f = new StringField("color", "", Store.NO);
+ doc.add(f);
+ for (RandomIndexWriter w : Arrays.asList(w1, w2)) {
+ for (int i = 0; i < 10; ++i) {
+ f.setStringValue(RandomPicks.randomFrom(random(), colors));
+ w.addDocument(doc);
+ if (random().nextBoolean()) {
+ // randomly create multiple segments
+ w.getReader().close();
+ }
+ }
+ }
+
+ final DirectoryReader reader1 = w1.getReader();
+ final int segmentCount1 = reader1.leaves().size();
+ final IndexSearcher searcher1 = new IndexSearcher(reader1);
+
+ final DirectoryReader reader2 = w2.getReader();
+ final int segmentCount2 = reader2.leaves().size();
+ final IndexSearcher searcher2 = new IndexSearcher(reader2);
+
+ // map each segment's core cache key to the index (1 or 2) it belongs to,
+ // so callbacks can be attributed to the right per-index counter
+ final Map<Object, Integer> indexId = new HashMap<>();
+ for (LeafReaderContext ctx : reader1.leaves()) {
+ indexId.put(ctx.reader().getCoreCacheKey(), 1);
+ }
+ for (LeafReaderContext ctx : reader2.leaves()) {
+ indexId.put(ctx.reader().getCoreCacheKey(), 2);
+ }
+
+ final AtomicLong hitCount1 = new AtomicLong();
+ final AtomicLong hitCount2 = new AtomicLong();
+ final AtomicLong missCount1 = new AtomicLong();
+ final AtomicLong missCount2 = new AtomicLong();
+
+ // stats recomputed purely from the callbacks, to be compared against the
+ // cache's own ramBytesUsed()/getCacheSize() at the end
+ final AtomicLong ramBytesUsage = new AtomicLong();
+ final AtomicLong cacheSize = new AtomicLong();
+
+ final LRUQueryCache queryCache = new LRUQueryCache(2, 10000000) {
+ @Override
+ protected void onHit(Object readerCoreKey, Query query) {
+ super.onHit(readerCoreKey, query);
+ switch(indexId.get(readerCoreKey).intValue()) {
+ case 1:
+ hitCount1.incrementAndGet();
+ break;
+ case 2:
+ hitCount2.incrementAndGet();
+ break;
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ @Override
+ protected void onMiss(Object readerCoreKey, Query query) {
+ super.onMiss(readerCoreKey, query);
+ switch(indexId.get(readerCoreKey).intValue()) {
+ case 1:
+ missCount1.incrementAndGet();
+ break;
+ case 2:
+ missCount2.incrementAndGet();
+ break;
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ @Override
+ protected void onQueryCache(Query query, long ramBytesUsed) {
+ super.onQueryCache(query, ramBytesUsed);
+ ramBytesUsage.addAndGet(ramBytesUsed);
+ }
+
+ @Override
+ protected void onQueryEviction(Query query, long ramBytesUsed) {
+ super.onQueryEviction(query, ramBytesUsed);
+ ramBytesUsage.addAndGet(-ramBytesUsed);
+ }
+
+ @Override
+ protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) {
+ super.onDocIdSetCache(readerCoreKey, ramBytesUsed);
+ ramBytesUsage.addAndGet(ramBytesUsed);
+ cacheSize.incrementAndGet();
+ }
+
+ @Override
+ protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
+ super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed);
+ ramBytesUsage.addAndGet(-sumRamBytesUsed);
+ cacheSize.addAndGet(-numEntries);
+ }
+
+ @Override
+ protected void onClear() {
+ super.onClear();
+ ramBytesUsage.set(0);
+ cacheSize.set(0);
+ }
+ };
+
+ final Query query = new TermQuery(new Term("color", "red"));
+ final Query query2 = new TermQuery(new Term("color", "blue"));
+ final Query query3 = new TermQuery(new Term("color", "green"));
+
+ for (IndexSearcher searcher : Arrays.asList(searcher1, searcher2)) {
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+ }
+
+ // search on searcher1
+ for (int i = 0; i < 10; ++i) {
+ searcher1.search(new ConstantScoreQuery(query), 1);
+ }
+ assertEquals(9 * segmentCount1, hitCount1.longValue());
+ assertEquals(0, hitCount2.longValue());
+ assertEquals(segmentCount1, missCount1.longValue());
+ assertEquals(0, missCount2.longValue());
+
+ // then on searcher2
+ for (int i = 0; i < 20; ++i) {
+ searcher2.search(new ConstantScoreQuery(query2), 1);
+ }
+ assertEquals(9 * segmentCount1, hitCount1.longValue());
+ assertEquals(19 * segmentCount2, hitCount2.longValue());
+ assertEquals(segmentCount1, missCount1.longValue());
+ assertEquals(segmentCount2, missCount2.longValue());
+
+ // now on searcher1 again to trigger evictions
+ for (int i = 0; i < 30; ++i) {
+ searcher1.search(new ConstantScoreQuery(query3), 1);
+ }
+ assertEquals(segmentCount1, queryCache.getEvictionCount());
+ assertEquals(38 * segmentCount1, hitCount1.longValue());
+ assertEquals(19 * segmentCount2, hitCount2.longValue());
+ assertEquals(2 * segmentCount1, missCount1.longValue());
+ assertEquals(segmentCount2, missCount2.longValue());
+
+ // check that the recomputed stats are the same as those reported by the cache
+ assertEquals(queryCache.ramBytesUsed(), (segmentCount1 + segmentCount2) * LRUQueryCache.HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsage.longValue());
+ assertEquals(queryCache.getCacheSize(), cacheSize.longValue());
+
+ reader1.close();
+ reader2.close();
+ w1.close();
+ w2.close();
+
+ // after closing readers, per-core entries are gone: recomputed and
+ // reported values must agree exactly
+ assertEquals(queryCache.ramBytesUsed(), ramBytesUsage.longValue());
+ assertEquals(0, cacheSize.longValue());
+
+ queryCache.clear();
+ assertEquals(0, ramBytesUsage.longValue());
+ assertEquals(0, cacheSize.longValue());
+
+ dir1.close();
+ dir2.close();
+ }
+
+ // Verifies that the cache key is the rewritten, boost-free leaf query: a
+ // boosted TermQuery wrapped in a BooleanQuery must still present the plain
+ // TermQuery as its cache key to the caching policy.
+ public void testUseRewrittenQueryAsCacheKey() throws IOException {
+ final Query expectedCacheKey = new TermQuery(new Term("foo", "bar"));
+ final BooleanQuery query = new BooleanQuery();
+ final Query sub = expectedCacheKey.clone();
+ sub.setBoost(42);
+ query.add(sub, Occur.MUST);
+
+ final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000);
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.YES));
+ w.addDocument(doc);
+ w.commit();
+ final IndexReader reader = w.getReader();
+ final IndexSearcher searcher = newSearcher(reader);
+ w.close();
+
+ // policy that asserts the key seen by the cache is the un-boosted query
+ final QueryCachingPolicy policy = new QueryCachingPolicy() {
+
+ @Override
+ public boolean shouldCache(Query query, LeafReaderContext context) throws IOException {
+ assertEquals(expectedCacheKey, QueryCache.cacheKey(query));
+ return true;
+ }
+
+ @Override
+ public void onUse(Query query) {
+ assertEquals(expectedCacheKey, QueryCache.cacheKey(query));
+ }
+ };
+
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(policy);
+ searcher.search(query, new TotalHitCountCollector());
+
+ reader.close();
+ dir.close();
+ }
+
+ // Verifies which clauses of a BooleanQuery get cached: when scores are
+ // needed, only the non-scoring FILTER and MUST_NOT clauses are cached; when
+ // the whole query is wrapped in ConstantScoreQuery, all clauses plus the
+ // scoreless equivalent of the query itself (bq2) end up in the cache.
+ public void testBooleanQueryCachesSubClauses() throws IOException {
+ Directory dir = newDirectory();
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.YES));
+ w.addDocument(doc);
+ w.commit();
+ final IndexReader reader = w.getReader();
+ final IndexSearcher searcher = newSearcher(reader);
+ w.close();
+
+ final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000);
+ searcher.setQueryCache(queryCache);
+ searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
+
+ BooleanQuery bq = new BooleanQuery();
+ TermQuery should = new TermQuery(new Term("foo", "baz"));
+ TermQuery must = new TermQuery(new Term("foo", "bar"));
+ TermQuery filter = new TermQuery(new Term("foo", "bar"));
+ TermQuery mustNot = new TermQuery(new Term("foo", "foo"));
+ bq.add(should, Occur.SHOULD);
+ bq.add(must, Occur.MUST);
+ bq.add(filter, Occur.FILTER);
+ bq.add(mustNot, Occur.MUST_NOT);
+
+ // same bq but with FILTER instead of MUST
+ BooleanQuery bq2 = new BooleanQuery();
+ bq2.add(should, Occur.SHOULD);
+ bq2.add(must, Occur.FILTER);
+ bq2.add(filter, Occur.FILTER);
+ bq2.add(mustNot, Occur.MUST_NOT);
+
+ assertEquals(Collections.emptySet(), new HashSet<>(queryCache.cachedQueries()));
+ // scoring search: only the non-scoring clauses are cacheable
+ searcher.search(bq, 1);
+ assertEquals(new HashSet<>(Arrays.asList(filter, mustNot)), new HashSet<>(queryCache.cachedQueries()));
+
+ queryCache.clear();
+ assertEquals(Collections.emptySet(), new HashSet<>(queryCache.cachedQueries()));
+ // constant-score search: everything is cacheable, including bq2
+ searcher.search(new ConstantScoreQuery(bq), 1);
+ assertEquals(new HashSet<>(Arrays.asList(bq2, should, must, filter, mustNot)), new HashSet<>(queryCache.cachedQueries()));
+
+ reader.close();
+ dir.close();
+ }
+
+}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java Fri Feb 27 18:09:01 2015
@@ -107,6 +107,15 @@ public class TestPrefixRandom extends Lu
public String toString(String field) {
return field.toString() + ":" + prefix.toString();
}
+
+ // equality must also compare the prefix term, on top of whatever the
+ // superclass checks, so that distinct prefixes never collide as cache keys
+ @Override
+ public boolean equals(Object obj) {
+ if (super.equals(obj) == false) {
+ return false;
+ }
+ final DumbPrefixQuery that = (DumbPrefixQuery) obj;
+ return prefix.equals(that.prefix);
+ }
}
/** test a bunch of random prefixes */
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java Fri Feb 27 18:09:01 2015
@@ -25,15 +25,38 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
public class TestQueryWrapperFilter extends LuceneTestCase {
+ // a filter for which other queries don't have special rewrite rules
+ private static class FilterWrapper extends Filter {
+
+ // the wrapped filter; all work is delegated to it
+ private final Filter in;
+
+ FilterWrapper(Filter in) {
+ this.in = in;
+ }
+
+ @Override
+ public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
+ // pure delegation: same matching docs as the wrapped filter
+ return in.getDocIdSet(context, acceptDocs);
+ }
+
+ @Override
+ public String toString(String field) {
+ return in.toString(field);
+ }
+
+ }
+
public void testBasic() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
@@ -51,7 +74,7 @@ public class TestQueryWrapperFilter exte
IndexSearcher searcher = newSearcher(reader);
TopDocs hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, hits.totalHits);
- hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
+ hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new FilterWrapper(qwf)), 10);
assertEquals(1, hits.totalHits);
// should not throw exception with complex primitive query
@@ -63,7 +86,7 @@ public class TestQueryWrapperFilter exte
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, hits.totalHits);
- hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
+ hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new FilterWrapper(qwf)), 10);
assertEquals(1, hits.totalHits);
// should not throw exception with non primitive Query (doesn't implement
@@ -72,7 +95,7 @@ public class TestQueryWrapperFilter exte
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, hits.totalHits);
- hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
+ hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new FilterWrapper(qwf)), 10);
assertEquals(1, hits.totalHits);
// test a query with no hits
@@ -80,7 +103,7 @@ public class TestQueryWrapperFilter exte
qwf = new QueryWrapperFilter(termQuery);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(0, hits.totalHits);
- hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
+ hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new FilterWrapper(qwf)), 10);
assertEquals(0, hits.totalHits);
reader.close();
dir.close();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java Fri Feb 27 18:09:01 2015
@@ -138,6 +138,15 @@ public class TestRegexpRandom2 extends L
public String toString(String field) {
return field.toString() + automaton.toString();
}
+
+ @Override
+ public boolean equals(Object obj) {
+ if (super.equals(obj) == false) {
+ return false;
+ }
+ final DumbRegexpQuery that = (DumbRegexpQuery) obj;
+ return automaton.equals(that.automaton);
+ }
}
/** test a bunch of random regular expressions */
@@ -146,7 +155,7 @@ public class TestRegexpRandom2 extends L
for (int i = 0; i < num; i++) {
String reg = AutomatonTestUtil.randomRegexp(random());
if (VERBOSE) {
- System.out.println("TEST: regexp=" + reg);
+ System.out.println("TEST: regexp='" + reg + "'");
}
assertSame(reg);
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java Fri Feb 27 18:09:01 2015
@@ -19,26 +19,26 @@ package org.apache.lucene.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.RoaringDocIdSet;
public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
- public void testCheapToCache() {
- assertTrue(UsageTrackingFilterCachingPolicy.isCheapToCache(null));
- assertTrue(UsageTrackingFilterCachingPolicy.isCheapToCache(DocIdSet.EMPTY));
- assertTrue(UsageTrackingFilterCachingPolicy.isCheapToCache(new RoaringDocIdSet.Builder(5).add(3).build()));
- assertFalse(UsageTrackingFilterCachingPolicy.isCheapToCache(new DocValuesDocIdSet(5, null) {
- @Override
- protected boolean matchDoc(int doc) {
- return false;
- }
- }));
+ public void testCostlyFilter() {
+ assertTrue(UsageTrackingQueryCachingPolicy.isCostly(new PrefixQuery(new Term("field", "prefix"))));
+ assertTrue(UsageTrackingQueryCachingPolicy.isCostly(NumericRangeQuery.newIntRange("intField", 8, 1, 1000, true, true)));
+ assertFalse(UsageTrackingQueryCachingPolicy.isCostly(new TermQuery(new Term("field", "value"))));
}
- public void testCostlyFilter() {
- assertTrue(UsageTrackingFilterCachingPolicy.isCostly(new QueryWrapperFilter(new PrefixQuery(new Term("field", "prefix")))));
- assertTrue(UsageTrackingFilterCachingPolicy.isCostly(new QueryWrapperFilter(NumericRangeQuery.newIntRange("intField", 8, 1, 1000, true, true))));
- assertFalse(UsageTrackingFilterCachingPolicy.isCostly(new QueryWrapperFilter(new TermQuery(new Term("field", "value")))));
+ public void testBoostIgnored() {
+ Query q1 = new TermQuery(new Term("foo", "bar"));
+ q1.setBoost(2);
+ Query q2 = q1.clone();
+ q2.setBoost(3);
+ Query q3 = q1.clone();
+ q3.setBoost(4);
+ UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
+ policy.onUse(q1);
+ policy.onUse(q2);
+ assertEquals(2, policy.frequency(q3));
}
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNotDocIdSet.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNotDocIdSet.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNotDocIdSet.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNotDocIdSet.java Fri Feb 27 18:09:01 2015
@@ -38,7 +38,6 @@ public class TestNotDocIdSet extends Bas
throws IOException {
super.assertEquals(numBits, ds1, ds2);
final Bits bits2 = ds2.bits();
- assertTrue(ds2.isCacheable()); // since we wrapped a FixedBitSet
assertNotNull(bits2); // since we wrapped a FixedBitSet
assertEquals(numBits, bits2.length());
for (int i = 0; i < numBits; ++i) {
Modified: lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java (original)
+++ lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java Fri Feb 27 18:09:01 2015
@@ -26,6 +26,7 @@ import org.apache.lucene.facet.sortedset
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.search.FilterCollector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@@ -173,6 +174,16 @@ public class DrillSideways {
drillDownQueries[i-startClause] = clauses[i].getQuery();
}
DrillSidewaysQuery dsq = new DrillSidewaysQuery(baseQuery, drillDownCollector, drillSidewaysCollectors, drillDownQueries, scoreSubDocsAtOnce());
+ if (hitCollector.needsScores() == false) {
+ // this is a horrible hack in order to make sure IndexSearcher will not
+ // attempt to cache the DrillSidewaysQuery
+ hitCollector = new FilterCollector(hitCollector) {
+ @Override
+ public boolean needsScores() {
+ return true;
+ }
+ };
+ }
searcher.search(dsq, hitCollector);
return new DrillSidewaysResult(buildFacetsResult(drillDownCollector, drillSidewaysCollectors, drillDownDims.keySet().toArray(new String[drillDownDims.size()])), null);
Modified: lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java (original)
+++ lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java Fri Feb 27 18:09:01 2015
@@ -33,8 +33,10 @@ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
/** Only purpose is to punch through and return a
- * DrillSidewaysScorer */
+ * DrillSidewaysScorer*/
+// TODO change the way DrillSidewaysScorer is used, this query does not work
+// with filter caching
class DrillSidewaysQuery extends Query {
final Query baseQuery;
final Collector drillDownCollector;
Modified: lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java (original)
+++ lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java Fri Feb 27 18:09:01 2015
@@ -55,17 +55,18 @@ import org.apache.lucene.queries.functio
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
-import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.search.CachingWrapperQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilterCachingPolicy;
+import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
@@ -908,16 +909,19 @@ public class TestRangeFacetCounts extend
final AtomicBoolean filterWasUsed = new AtomicBoolean();
if (random().nextBoolean()) {
// Sort of silly:
- fastMatchFilter = new CachingWrapperFilter(new QueryWrapperFilter(new MatchAllDocsQuery()), FilterCachingPolicy.ALWAYS_CACHE) {
- @Override
- protected DocIdSet cacheImpl(DocIdSetIterator iterator, LeafReader reader)
+ final Filter in = new QueryWrapperFilter(new MatchAllDocsQuery());
+ fastMatchFilter = new Filter() {
+ @Override
+ public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs)
throws IOException {
- final FixedBitSet cached = new FixedBitSet(reader.maxDoc());
- filterWasUsed.set(true);
- cached.or(iterator);
- return new BitDocIdSet(cached);
- }
- };
+ filterWasUsed.set(true);
+ return in.getDocIdSet(context, acceptDocs);
+ }
+ @Override
+ public String toString(String field) {
+ return in.toString(field);
+ }
+ };
} else {
fastMatchFilter = null;
}
Modified: lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java (original)
+++ lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java Fri Feb 27 18:09:01 2015
@@ -28,7 +28,7 @@ import org.apache.lucene.index.RandomInd
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
-import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.search.CachingWrapperQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryWrapperFilter;
@@ -159,7 +159,7 @@ public class GroupingSearchTest extends
assertEquals(1, group.scoreDocs.length);
assertEquals(6, group.scoreDocs[0].doc);
- Filter lastDocInBlock = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("groupend", "x"))));
+ Filter lastDocInBlock = new QueryWrapperFilter(new TermQuery(new Term("groupend", "x")));
groupingSearch = new GroupingSearch(lastDocInBlock);
groups = groupingSearch.search(indexSearcher, new TermQuery(new Term("content", "random")), 0, 10);
Modified: lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java (original)
+++ lucene/dev/trunk/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java Fri Feb 27 18:09:01 2015
@@ -51,7 +51,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
import org.apache.lucene.search.CachingCollector;
-import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.search.CachingWrapperQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
@@ -798,7 +798,7 @@ public class TestGrouping extends Lucene
// group, so we can use single pass collector
dirBlocks = newDirectory();
rBlocks = getDocBlockReader(dirBlocks, groupDocs);
- final Filter lastDocInBlock = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("groupend", "x"))));
+ final Filter lastDocInBlock = new QueryWrapperFilter(new TermQuery(new Term("groupend", "x")));
final NumericDocValues docIDToIDBlocks = MultiDocValues.getNumericValues(rBlocks, "id");
assertNotNull(docIDToIDBlocks);
Modified: lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/BitDocIdSetCachingWrapperFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/BitDocIdSetCachingWrapperFilter.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/BitDocIdSetCachingWrapperFilter.java (original)
+++ lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/BitDocIdSetCachingWrapperFilter.java Fri Feb 27 18:09:01 2015
@@ -18,81 +18,82 @@ package org.apache.lucene.search.join;
*/
import java.io.IOException;
-import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.WeakHashMap;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.CachingWrapperFilter;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilterCachingPolicy;
-import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitDocIdSet;
/**
- * A filter wrapper that transforms the produces doc id sets into
- * {@link BitDocIdSet}s if necessary and caches them.
+ * {@link Filter} wrapper that implements {@link BitDocIdSetFilter}.
*/
-public class BitDocIdSetCachingWrapperFilter extends BitDocIdSetFilter implements Accountable {
-
- private final CachingWrapperFilter filter;
-
- /** Sole constructor. */
+public class BitDocIdSetCachingWrapperFilter extends BitDocIdSetFilter {
+ private final Filter filter;
+ private final Map<Object,DocIdSet> cache = Collections.synchronizedMap(new WeakHashMap<>());
+
+ /** Wraps another filter's result and caches it into bitsets.
+ * @param filter Filter to cache results of
+ */
public BitDocIdSetCachingWrapperFilter(Filter filter) {
- super();
- this.filter = new CachingWrapperFilter(filter, FilterCachingPolicy.ALWAYS_CACHE) {
- @Override
- protected BitDocIdSet docIdSetToCache(DocIdSet docIdSet, LeafReader reader) throws IOException {
- if (docIdSet == null || docIdSet instanceof BitDocIdSet) {
- // this is different from CachingWrapperFilter: even when the DocIdSet is
- // cacheable, we convert it to a BitSet since we require all the
- // cached filters to be BitSets
- return (BitDocIdSet) docIdSet;
- }
-
- final DocIdSetIterator it = docIdSet.iterator();
- if (it == null) {
- return null;
- }
- BitDocIdSet.Builder builder = new BitDocIdSet.Builder(reader.maxDoc());
- builder.or(it);
- return builder.build();
- }
- };
+ this.filter = filter;
}
- @Override
- public BitDocIdSet getDocIdSet(LeafReaderContext context) throws IOException {
- return (BitDocIdSet) filter.getDocIdSet(context, null);
+ /**
+ * Gets the contained filter.
+ * @return the contained filter.
+ */
+ public Filter getFilter() {
+ return filter;
+ }
+
+ private BitDocIdSet docIdSetToCache(DocIdSet docIdSet, LeafReader reader) throws IOException {
+ final DocIdSetIterator it = docIdSet.iterator();
+ if (it == null) {
+ return null;
+ } else {
+ BitDocIdSet.Builder builder = new BitDocIdSet.Builder(reader.maxDoc());
+ builder.or(it);
+ return builder.build();
+ }
}
-
+
@Override
- public int hashCode() {
- return getClass().hashCode() ^ filter.hashCode();
- }
+ public BitDocIdSet getDocIdSet(LeafReaderContext context) throws IOException {
+ final LeafReader reader = context.reader();
+ final Object key = reader.getCoreCacheKey();
- @Override
- public boolean equals(Object obj) {
- if (obj instanceof BitDocIdSetCachingWrapperFilter == false) {
- return false;
+ DocIdSet docIdSet = cache.get(key);
+ if (docIdSet == null) {
+ docIdSet = filter.getDocIdSet(context, null);
+ docIdSet = docIdSetToCache(docIdSet, reader);
+ if (docIdSet == null) {
+ // We use EMPTY as a sentinel for the empty set, which is cacheable
+ docIdSet = DocIdSet.EMPTY;
+ }
+ cache.put(key, docIdSet);
}
- return filter.equals(((BitDocIdSetCachingWrapperFilter) obj).filter);
+ return docIdSet == DocIdSet.EMPTY ? null : (BitDocIdSet) docIdSet;
}
-
+
@Override
public String toString(String field) {
- return filter.toString();
+ return getClass().getSimpleName() + "("+filter.toString(field)+")";
}
@Override
- public long ramBytesUsed() {
- return filter.ramBytesUsed();
+ public boolean equals(Object o) {
+ if (o == null || !getClass().equals(o.getClass())) return false;
+ final BitDocIdSetCachingWrapperFilter other = (BitDocIdSetCachingWrapperFilter) o;
+ return this.filter.equals(other.filter);
}
@Override
- public Collection<Accountable> getChildResources() {
- return filter.getChildResources();
+ public int hashCode() {
+ return (filter.hashCode() ^ getClass().hashCode());
}
-
}
Modified: lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java (original)
+++ lucene/dev/trunk/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java Fri Feb 27 18:09:01 2015
@@ -501,6 +501,8 @@ public class ToParentBlockJoinCollector
@Override
public boolean needsScores() {
- return trackScores || trackMaxScore || sort.needsScores();
+ // needed so that eg. BooleanQuery does not rewrite its MUST clauses to
+ // FILTER since the filter scorers are hidden in Scorer.getChildren().
+ return true;
}
}
Modified: lucene/dev/trunk/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java (original)
+++ lucene/dev/trunk/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java Fri Feb 27 18:09:01 2015
@@ -17,6 +17,7 @@ package org.apache.lucene.search.join;
* limitations under the License.
*/
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -27,10 +28,12 @@ import org.apache.lucene.index.Directory
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.search.CachingWrapperQuery;
+import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
@@ -39,6 +42,7 @@ import org.apache.lucene.search.QueryWra
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.After;
import org.junit.Before;
@@ -115,12 +119,33 @@ public class TestBlockJoinValidation ext
indexSearcher.search(blockJoinQuery, 1);
}
+ // a filter for which other queries don't have special rewrite rules
+ private static class FilterWrapper extends Filter {
+
+ private final Filter in;
+
+ FilterWrapper(Filter in) {
+ this.in = in;
+ }
+
+ @Override
+ public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
+ return in.getDocIdSet(context, acceptDocs);
+ }
+
+ @Override
+ public String toString(String field) {
+ return in.toString(field);
+ }
+
+ }
+
@Test
public void testValidationForToChildBjqWithChildFilterQuery() throws Exception {
Query parentQueryWithRandomChild = createParentQuery();
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter);
- Filter childFilter = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("common_field", "1"))));
+ Filter childFilter = new FilterWrapper(new QueryWrapperFilter(new TermQuery(new Term("common_field", "1"))));
thrown.expect(IllegalStateException.class);
thrown.expectMessage(ToChildBlockJoinQuery.ILLEGAL_ADVANCE_ON_PARENT);
indexSearcher.search(new FilteredQuery(blockJoinQuery, childFilter), 1);
@@ -137,8 +162,8 @@ public class TestBlockJoinValidation ext
// advance() method is used by ConjunctionScorer, so we need to create Boolean conjunction query
BooleanQuery conjunctionQuery = new BooleanQuery();
WildcardQuery childQuery = new WildcardQuery(new Term("child", createFieldValue(randomChildNumber)));
- conjunctionQuery.add(new BooleanClause(childQuery, BooleanClause.Occur.MUST));
- conjunctionQuery.add(new BooleanClause(blockJoinQuery, BooleanClause.Occur.MUST));
+ conjunctionQuery.add(childQuery, BooleanClause.Occur.MUST);
+ conjunctionQuery.add(blockJoinQuery, BooleanClause.Occur.MUST);
thrown.expect(IllegalStateException.class);
thrown.expectMessage(ToChildBlockJoinQuery.INVALID_QUERY_MESSAGE);
Modified: lucene/dev/trunk/lucene/misc/src/test/org/apache/lucene/index/TestBlockJoinSorter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/misc/src/test/org/apache/lucene/index/TestBlockJoinSorter.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/misc/src/test/org/apache/lucene/index/TestBlockJoinSorter.java (original)
+++ lucene/dev/trunk/lucene/misc/src/test/org/apache/lucene/index/TestBlockJoinSorter.java Fri Feb 27 18:09:01 2015
@@ -17,9 +17,14 @@ package org.apache.lucene.index;
* limitations under the License.
*/
+import static org.apache.lucene.search.DocIdSet.EMPTY;
+
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
+import java.util.Map;
+import java.util.WeakHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@@ -32,37 +37,60 @@ import org.apache.lucene.index.IndexWrit
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.BlockJoinComparatorSource;
-import org.apache.lucene.search.CachingWrapperFilter;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilterCachingPolicy;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.SparseFixedBitSet;
public class TestBlockJoinSorter extends LuceneTestCase {
- private static class FixedBitSetCachingWrapperFilter extends CachingWrapperFilter {
+ private static class BitSetCachingWrapperFilter extends Filter {
+
+ private final Filter filter;
+ private final Map<Object,BitDocIdSet> cache = Collections.synchronizedMap(new WeakHashMap<Object,BitDocIdSet>());
- public FixedBitSetCachingWrapperFilter(Filter filter) {
- super(filter, FilterCachingPolicy.ALWAYS_CACHE);
+ public BitSetCachingWrapperFilter(Filter filter) {
+ this.filter = filter;
}
@Override
- protected DocIdSet cacheImpl(DocIdSetIterator iterator, LeafReader reader)
- throws IOException {
- final FixedBitSet cached = new FixedBitSet(reader.maxDoc());
- cached.or(iterator);
- return new BitDocIdSet(cached);
+ public DocIdSet getDocIdSet(LeafReaderContext context, final Bits acceptDocs) throws IOException {
+ final LeafReader reader = context.reader();
+ final Object key = reader.getCoreCacheKey();
+
+ BitDocIdSet docIdSet = cache.get(key);
+ if (docIdSet == null) {
+ final DocIdSet uncached = filter.getDocIdSet(context, null);
+ final DocIdSetIterator it = uncached == null ? null : uncached.iterator();
+ if (it != null) {
+ BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc());
+ builder.or(it);
+ docIdSet = builder.build();
+ }
+ if (docIdSet == null) {
+ docIdSet = new BitDocIdSet(new SparseFixedBitSet(context.reader().maxDoc()));
+ }
+ cache.put(key, docIdSet);
+ }
+
+ return docIdSet == EMPTY ? null : BitsFilteredDocIdSet.wrap(docIdSet, acceptDocs);
}
+ @Override
+ public String toString(String field) {
+ return getClass().getName() + "(" + filter.toString(field) + ")";
+ }
}
public void test() throws IOException {
@@ -92,7 +120,7 @@ public class TestBlockJoinSorter extends
writer.close();
final LeafReader reader = getOnlySegmentReader(indexReader);
- final Filter parentsFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("parent", "true"))));
+ final Filter parentsFilter = new BitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("parent", "true"))));
final FixedBitSet parentBits = (FixedBitSet) parentsFilter.getDocIdSet(reader.getContext(), null).bits();
final NumericDocValues parentValues = reader.getNumericDocValues("parent_val");
final NumericDocValues childValues = reader.getNumericDocValues("child_val");
Modified: lucene/dev/trunk/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/CachedFilterBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/CachedFilterBuilder.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/CachedFilterBuilder.java (original)
+++ lucene/dev/trunk/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/CachedFilterBuilder.java Fri Feb 27 18:09:01 2015
@@ -4,7 +4,7 @@
package org.apache.lucene.queryparser.xml.builders;
import org.apache.lucene.queryparser.xml.*;
-import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.search.CachingWrapperQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
@@ -48,7 +48,7 @@ public class CachedFilterBuilder impleme
private final QueryBuilderFactory queryFactory;
private final FilterBuilderFactory filterFactory;
- private LRUCache<Object, Filter> filterCache;
+ private LRUCache<Object, Query> filterCache;
private final int cacheSize;
@@ -81,20 +81,20 @@ public class CachedFilterBuilder impleme
f = filterFactory.getFilter(childElement);
cacheKey = f;
}
- Filter cachedFilter = filterCache.get(cacheKey);
+ Query cachedFilter = filterCache.get(cacheKey);
if (cachedFilter != null) {
- return cachedFilter; // cache hit
+ return new QueryWrapperFilter(cachedFilter); // cache hit
}
//cache miss
if (qb != null) {
cachedFilter = new QueryWrapperFilter(q);
} else {
- cachedFilter = new CachingWrapperFilter(f);
+ cachedFilter = new CachingWrapperQuery(f);
}
filterCache.put(cacheKey, cachedFilter);
- return cachedFilter;
+ return new QueryWrapperFilter(cachedFilter);
}
static class LRUCache<K, V> extends java.util.LinkedHashMap<K, V> {
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java Fri Feb 27 18:09:01 2015
@@ -94,10 +94,9 @@ import org.apache.lucene.index.*;
import org.apache.lucene.index.IndexReader.ReaderClosedListener;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.search.AssertingIndexSearcher;
-import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilterCachingPolicy;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils.FCInvisibleMultiReader;
import org.apache.lucene.store.BaseDirectoryWrapper;
@@ -442,14 +441,14 @@ public abstract class LuceneTestCase ext
CORE_DIRECTORIES.add("RAMDirectory");
}
- /** A {@link org.apache.lucene.search.FilterCachingPolicy} that randomly caches. */
- public static final FilterCachingPolicy MAYBE_CACHE_POLICY = new FilterCachingPolicy() {
+ /** A {@link org.apache.lucene.search.QueryCachingPolicy} that randomly caches. */
+ public static final QueryCachingPolicy MAYBE_CACHE_POLICY = new QueryCachingPolicy() {
@Override
- public void onUse(Filter filter) {}
+ public void onUse(Query query) {}
@Override
- public boolean shouldCache(Filter filter, LeafReaderContext context, DocIdSet set) throws IOException {
+ public boolean shouldCache(Query query, LeafReaderContext context) throws IOException {
return random().nextBoolean();
}
@@ -1701,6 +1700,7 @@ public abstract class LuceneTestCase ext
ret = random.nextBoolean() ? new IndexSearcher(r) : new IndexSearcher(r.getContext());
}
ret.setSimilarity(classEnvRule.similarity);
+ ret.setQueryCachingPolicy(MAYBE_CACHE_POLICY);
return ret;
} else {
int threads = 0;
@@ -1737,6 +1737,7 @@ public abstract class LuceneTestCase ext
: new IndexSearcher(r.getContext(), ex);
}
ret.setSimilarity(classEnvRule.similarity);
+ ret.setQueryCachingPolicy(MAYBE_CACHE_POLICY);
return ret;
}
}
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/BitDocSet.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/BitDocSet.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/BitDocSet.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/BitDocSet.java Fri Feb 27 18:09:01 2015
@@ -332,11 +332,6 @@ public class BitDocSet extends DocSetBas
}
@Override
- public boolean isCacheable() {
- return true;
- }
-
- @Override
public long ramBytesUsed() {
return bs.ramBytesUsed();
}
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/DocSetBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/DocSetBase.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/DocSetBase.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/DocSetBase.java Fri Feb 27 18:09:01 2015
@@ -212,11 +212,6 @@ abstract class DocSetBase implements Doc
}
@Override
- public boolean isCacheable() {
- return true;
- }
-
- @Override
public long ramBytesUsed() {
return bs.ramBytesUsed();
}
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java Fri Feb 27 18:09:01 2015
@@ -97,7 +97,7 @@ if (c.query instanceof TermQuery) {
filter = (Filter)cache.get(filterQuery);
}
if (filter == null) { // miss
- filter = new CachingWrapperFilter(new QueryWrapperFilter(filterQuery)); // construct new entry
+ filter = new QueryWrapperFilter(new CachingWrapperQuery(filterQuery)); // construct new entry
synchronized (cache) {
cache.put(filterQuery, filter); // cache it
}
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SortedIntDocSet.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SortedIntDocSet.java?rev=1662774&r1=1662773&r2=1662774&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SortedIntDocSet.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SortedIntDocSet.java Fri Feb 27 18:09:01 2015
@@ -762,11 +762,6 @@ public class SortedIntDocSet extends Doc
}
@Override
- public boolean isCacheable() {
- return true;
- }
-
- @Override
public long ramBytesUsed() {
return RamUsageEstimator.sizeOf(docs);
}