Posted to commits@lucene.apache.org by mi...@apache.org on 2015/02/09 00:53:25 UTC
svn commit: r1658277 [8/38] - in /lucene/dev/branches/lucene6005: ./
dev-tools/ dev-tools/idea/solr/contrib/dataimporthandler/
dev-tools/idea/solr/contrib/velocity/ dev-tools/maven/lucene/replicator/
dev-tools/maven/solr/ dev-tools/maven/solr/contrib/v...
Modified: lucene/dev/branches/lucene6005/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java Sun Feb 8 23:53:14 2015
@@ -321,6 +321,7 @@ public class CompiledAutomaton {
if (this.finite) {
commonSuffixRef = null;
} else {
+ // NOTE: this is a very costly operation! We should test if it's really warranted in practice...
commonSuffixRef = Operations.getCommonSuffixBytesRef(binary, maxDeterminizedStates);
}
runAutomaton = new ByteRunAutomaton(binary, true, maxDeterminizedStates);
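
The NOTE added above flags Operations.getCommonSuffixBytesRef as a potentially very costly call. As a rough illustration only (not part of this commit, and assuming the org.apache.lucene.util.automaton APIs already used in the hunk above, including the UTF32ToUTF8 byte-level conversion that CompiledAutomaton performs), one could time the call on a non-finite automaton roughly like this:

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.apache.lucene.util.automaton.UTF32ToUTF8;

public class CommonSuffixCostSketch {
  public static void main(String[] args) {
    // a non-finite (looping) automaton: the case where CompiledAutomaton computes a common suffix
    Automaton a = new RegExp(".*foo.*bar").toAutomaton();
    // convert to a byte-level automaton, as CompiledAutomaton does before the call above
    Automaton binary = new UTF32ToUTF8().convert(a);
    long t0 = System.nanoTime();
    BytesRef suffix = Operations.getCommonSuffixBytesRef(binary, 10000);
    long t1 = System.nanoTime();
    System.out.println("common suffix=" + suffix + " took " + ((t1 - t0) / 1_000_000) + " ms");
  }
}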
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java Sun Feb 8 23:53:14 2015
@@ -16,6 +16,7 @@ package org.apache.lucene;
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -55,21 +56,20 @@ public class TestMergeSchedulerExternal
@Override
protected MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
MergeThread thread = new MyMergeThread(writer, merge);
- thread.setThreadPriority(getMergeThreadPriority());
thread.setDaemon(true);
thread.setName("MyMergeThread");
return thread;
}
@Override
- protected void handleMergeException(Throwable t) {
+ protected void handleMergeException(Directory dir, Throwable t) {
excCalled = true;
}
- @Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ @Override
+ protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
mergeCalled = true;
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
}
@@ -118,7 +118,7 @@ public class TestMergeSchedulerExternal
OneMerge merge = null;
while ((merge = writer.getNextMerge()) != null) {
if (VERBOSE) {
- System.out.println("executing merge " + merge.segString(writer.getDirectory()));
+ System.out.println("executing merge " + merge.segString());
}
writer.merge(merge);
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java Sun Feb 8 23:53:14 2015
@@ -25,15 +25,18 @@ import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LowSchemaField;
import org.apache.lucene.index.BaseStoredFieldsFormatTestCase;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.junit.Test;
-
import com.carrotsearch.randomizedtesting.generators.RandomInts;
public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTestCase {
@@ -260,4 +263,50 @@ public class TestCompressingStoredFields
out.reset(buffer);
}
}
+
+ /**
+ * writes some tiny segments with incomplete compressed blocks,
+ * and ensures merge recompresses them.
+ */
+ public void testChunkCleanup() throws IOException {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
+ iwConf.setMergePolicy(NoMergePolicy.INSTANCE);
+
+ // we have to enforce certain things like maxDocsPerChunk to cause dirty chunks to be created
+ // by this test.
+ iwConf.setCodec(CompressingCodec.randomInstance(random(), 4*1024, 100, false, 8));
+ IndexWriter iw = new IndexWriter(dir, iwConf);
+ DirectoryReader ir = DirectoryReader.open(iw, true);
+ for (int i = 0; i < 5; i++) {
+ Document doc = iw.newDocument();
+ doc.addStoredString("text", "not very long at all");
+ iw.addDocument(doc);
+ // force flush
+ DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+ assertNotNull(ir2);
+ ir.close();
+ ir = ir2;
+ // examine dirty counts:
+ for (LeafReaderContext leaf : ir2.leaves()) {
+ CodecReader sr = (CodecReader) leaf.reader();
+ CompressingStoredFieldsReader reader = (CompressingStoredFieldsReader)sr.getFieldsReader();
+ assertEquals(1, reader.getNumChunks());
+ assertEquals(1, reader.getNumDirtyChunks());
+ }
+ }
+ iw.getConfig().setMergePolicy(newLogMergePolicy());
+ iw.forceMerge(1);
+ DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+ assertNotNull(ir2);
+ ir.close();
+ ir = ir2;
+ CodecReader sr = getOnlySegmentReader(ir);
+ CompressingStoredFieldsReader reader = (CompressingStoredFieldsReader)sr.getFieldsReader();
+ // we could get lucky, and have zero, but typically one.
+ assertTrue(reader.getNumDirtyChunks() <= 1);
+ ir.close();
+ iw.close();
+ dir.close();
+ }
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java Sun Feb 8 23:53:14 2015
@@ -1,13 +1,23 @@
package org.apache.lucene.codecs.compressing;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldTypes;
import org.apache.lucene.index.BaseTermVectorsFormatTestCase;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
@@ -64,5 +74,53 @@ public class TestCompressingTermVectorsF
ir.close();
iw.close();
dir.close();
+ }
+
+ /**
+ * writes some tiny segments with incomplete compressed blocks,
+ * and ensures merge recompresses them.
+ */
+ public void testChunkCleanup() throws IOException {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
+ iwConf.setMergePolicy(NoMergePolicy.INSTANCE);
+
+ // we have to enforce certain things like maxDocsPerChunk to cause dirty chunks to be created
+ // by this test.
+ iwConf.setCodec(CompressingCodec.randomInstance(random(), 4*1024, 100, false, 8));
+ IndexWriter iw = new IndexWriter(dir, iwConf);
+ FieldTypes fieldTypes = iw.getFieldTypes();
+ fieldTypes.enableTermVectors("text");
+ DirectoryReader ir = DirectoryReader.open(iw, true);
+ for (int i = 0; i < 5; i++) {
+ Document doc = iw.newDocument();
+ doc.addShortText("text", "not very long at all");
+ iw.addDocument(doc);
+ // force flush
+ DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+ assertNotNull(ir2);
+ ir.close();
+ ir = ir2;
+ // examine dirty counts:
+ for (LeafReaderContext leaf : ir2.leaves()) {
+ CodecReader sr = (CodecReader) leaf.reader();
+ CompressingTermVectorsReader reader = (CompressingTermVectorsReader)sr.getTermVectorsReader();
+ assertEquals(1, reader.getNumChunks());
+ assertEquals(1, reader.getNumDirtyChunks());
+ }
+ }
+ iw.getConfig().setMergePolicy(newLogMergePolicy());
+ iw.forceMerge(1);
+ DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+ assertNotNull(ir2);
+ ir.close();
+ ir = ir2;
+ CodecReader sr = getOnlySegmentReader(ir);
+ CompressingTermVectorsReader reader = (CompressingTermVectorsReader)sr.getTermVectorsReader();
+ // we could get lucky, and have zero, but typically one.
+ assertTrue(reader.getNumDirtyChunks() <= 1);
+ ir.close();
+ iw.close();
+ dir.close();
}
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java Sun Feb 8 23:53:14 2015
@@ -67,4 +67,21 @@ public class TestBlockPostingsFormat ext
w.close();
d.close();
}
+
+ private void shouldFail(int minItemsInBlock, int maxItemsInBlock) {
+ try {
+ new Lucene50PostingsFormat(minItemsInBlock, maxItemsInBlock);
+ fail("did not hit exception");
+ } catch (IllegalArgumentException iae) {
+ // expected
+ }
+ }
+
+ public void testInvalidBlockSizes() throws Exception {
+ shouldFail(0, 0);
+ shouldFail(10, 8);
+ shouldFail(-1, 10);
+ shouldFail(10, -1);
+ shouldFail(10, 12);
+ }
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java Sun Feb 8 23:53:14 2015
@@ -32,12 +32,14 @@ import org.apache.lucene.index.BaseCompr
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil;
@@ -177,10 +179,27 @@ public class TestLucene50DocValuesFormat
writer.deleteDocuments(new Term("id", Integer.toString(id)));
}
+ // compare per-segment
+ DirectoryReader ir = writer.getReader();
+ for (LeafReaderContext context : ir.leaves()) {
+ LeafReader r = context.reader();
+ Terms terms = r.terms("indexed");
+ if (terms != null) {
+ SortedSetDocValues ssdv = r.getSortedSetDocValues("dv");
+ assertEquals(terms.size(), ssdv.getValueCount());
+ TermsEnum expected = terms.iterator(null);
+ TermsEnum actual = r.getSortedSetDocValues("dv").termsEnum();
+ assertEquals(terms.size(), expected, actual);
+
+ doTestSortedSetEnumAdvanceIndependently(ssdv);
+ }
+ }
+ ir.close();
+
writer.forceMerge(1);
// now compare again after the merge
- DirectoryReader ir = writer.getReader();
+ ir = writer.getReader();
LeafReader ar = getOnlySegmentReader(ir);
Terms terms = ar.terms("indexed");
if (terms != null) {
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java Sun Feb 8 23:53:14 2015
@@ -22,8 +22,10 @@ import org.apache.lucene.index.IndexRead
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
public class TestFieldTypes extends LuceneTestCase {
@@ -102,7 +104,7 @@ public class TestFieldTypes extends Luce
doc.addInt("field", 5);
w.addDocument(doc);
w.close();
- IndexReader sub = DirectoryReader.open(dir);
+ DirectoryReader sub = DirectoryReader.open(dir);
w = newIndexWriter(newIndexWriterConfig().setOpenMode(IndexWriterConfig.OpenMode.CREATE));
doc = w.newDocument();
@@ -110,7 +112,7 @@ public class TestFieldTypes extends Luce
w.addDocument(doc);
try {
- w.addIndexes(sub);
+ TestUtil.addIndexesSlowly(w, sub);
fail("did not hit exception");
} catch (IllegalStateException ise) {
assertEquals("field \"field\": cannot change value type from SHORT_TEXT to INT", ise.getMessage());
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java Sun Feb 8 23:53:14 2015
@@ -82,31 +82,29 @@ public class Test2BPostingsBytes extends
w.close();
DirectoryReader oneThousand = DirectoryReader.open(dir);
- IndexReader subReaders[] = new IndexReader[1000];
+ DirectoryReader subReaders[] = new DirectoryReader[1000];
Arrays.fill(subReaders, oneThousand);
- MultiReader mr = new MultiReader(subReaders);
BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
if (dir2 instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriter w2 = new IndexWriter(dir2,
new IndexWriterConfig(null));
- w2.addIndexes(mr);
+ TestUtil.addIndexesSlowly(w2, subReaders);
w2.forceMerge(1);
w2.close();
oneThousand.close();
DirectoryReader oneMillion = DirectoryReader.open(dir2);
- subReaders = new IndexReader[2000];
+ subReaders = new DirectoryReader[2000];
Arrays.fill(subReaders, oneMillion);
- mr = new MultiReader(subReaders);
BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
if (dir3 instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriter w3 = new IndexWriter(dir3,
new IndexWriterConfig(null));
- w3.addIndexes(mr);
+ TestUtil.addIndexesSlowly(w3, subReaders);
w3.forceMerge(1);
w3.close();
oneMillion.close();
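
Many changes in this commit swap the removed writer.addIndexes(IndexReader[]) overload for TestUtil.addIndexesSlowly(writer, readers). As a minimal sketch only, assuming the SlowCodecReaderWrapper and IndexWriter.addIndexes(CodecReader...) APIs that appear elsewhere in this patch (this is not the actual TestUtil source), such a helper plausibly looks like:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.CodecReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SlowCodecReaderWrapper;

public class AddIndexesSlowlySketch {
  // hypothetical stand-in for TestUtil.addIndexesSlowly
  public static void addIndexesSlowly(IndexWriter writer, DirectoryReader... readers) throws IOException {
    List<CodecReader> leaves = new ArrayList<>();
    for (DirectoryReader reader : readers) {
      for (LeafReaderContext ctx : reader.leaves()) {
        // wrap each leaf so a composite reader can be fed to addIndexes(CodecReader...)
        leaves.add(SlowCodecReaderWrapper.wrap(ctx.reader()));
      }
    }
    writer.addIndexes(leaves.toArray(new CodecReader[leaves.size()]));
  }
}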
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java Sun Feb 8 23:53:14 2015
@@ -789,9 +789,9 @@ public class TestAbuseSchema extends Luc
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
- IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
- writer.addIndexes(readers);
- readers[0].close();
+ DirectoryReader reader = DirectoryReader.open(dir);
+ TestUtil.addIndexesSlowly(writer, reader);
+ reader.close();
field = new LowSchemaField(a, "dv", new BytesRef("foo"), IndexOptions.NONE, false);
field.setDocValuesType(DocValuesType.BINARY);
doc = new ArrayList<>();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java Sun Feb 8 23:53:14 2015
@@ -649,7 +649,7 @@ public class TestAddIndexes extends Luce
IndexWriter writer2;
final List<Throwable> failures = new ArrayList<>();
volatile boolean didClose;
- final IndexReader[] readers;
+ final DirectoryReader[] readers;
final int NUM_COPY;
final static int NUM_THREADS = 5;
final Thread[] threads = new Thread[NUM_THREADS];
@@ -668,7 +668,7 @@ public class TestAddIndexes extends Luce
writer2.commit();
- readers = new IndexReader[NUM_COPY];
+ readers = new DirectoryReader[NUM_COPY];
for(int i=0;i<NUM_COPY;i++)
readers[i] = DirectoryReader.open(dir);
}
@@ -767,9 +767,9 @@ public class TestAddIndexes extends Luce
break;
case 2:
if (VERBOSE) {
- System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(IndexReader[])");
+ System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(LeafReader[])");
}
- writer2.addIndexes(readers);
+ TestUtil.addIndexesSlowly(writer2, readers);
break;
case 3:
if (VERBOSE) {
@@ -872,9 +872,9 @@ public class TestAddIndexes extends Luce
break;
case 2:
if (VERBOSE) {
- System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(IR[])");
+ System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(LR[])");
}
- writer2.addIndexes(readers);
+ TestUtil.addIndexesSlowly(writer2, readers);
break;
case 3:
if (VERBOSE) {
@@ -952,10 +952,16 @@ public class TestAddIndexes extends Luce
System.out.println("TEST: now force rollback");
}
c.didClose = true;
+ MergeScheduler ms = c.writer2.getConfig().getMergeScheduler();
+
c.writer2.rollback();
c.joinThreads();
+ if (ms instanceof ConcurrentMergeScheduler) {
+ assertEquals(0, ((ConcurrentMergeScheduler) ms).mergeThreadCount());
+ }
+
c.closeDir();
assertTrue(c.failures.size() == 0);
@@ -979,11 +985,8 @@ public class TestAddIndexes extends Luce
// Now delete the document
writer.deleteDocuments(new Term("id", "myid"));
- IndexReader r = DirectoryReader.open(dirs[1]);
- try {
- writer.addIndexes(r);
- } finally {
- r.close();
+ try (DirectoryReader r = DirectoryReader.open(dirs[1])) {
+ TestUtil.addIndexesSlowly(writer, r);
}
writer.commit();
assertEquals("Documents from the incoming index should not have been deleted", 1, writer.numDocs());
@@ -1098,7 +1101,7 @@ public class TestAddIndexes extends Luce
w.close();
}
- IndexReader[] readers = new IndexReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
+ DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
dir.setEnableVirusScanner(false); // we check for specific list of files
@@ -1108,7 +1111,7 @@ public class TestAddIndexes extends Luce
lmp.setNoCFSRatio(1.0);
lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
IndexWriter w3 = new IndexWriter(dir, conf);
- w3.addIndexes(readers);
+ TestUtil.addIndexesSlowly(w3, readers);
w3.close();
// we should now see segments_X,
// _Y.cfs,_Y.cfe, _Z.si
@@ -1177,7 +1180,7 @@ public class TestAddIndexes extends Luce
doc.addAtom("f1", "doc1 field1");
doc.addAtom("id", "1");
w.addDocument(doc);
- IndexReader r1 = w.getReader();
+ DirectoryReader r1 = w.getReader();
w.close();
Directory d2 = newDirectory();
@@ -1186,12 +1189,12 @@ public class TestAddIndexes extends Luce
doc.addAtom("f2", "doc2 field2");
doc.addAtom("id", "2");
w.addDocument(doc);
- IndexReader r2 = w.getReader();
+ DirectoryReader r2 = w.getReader();
w.close();
Directory d3 = newDirectory();
w = new RandomIndexWriter(random(), d3);
- w.addIndexes(r1, r2);
+ TestUtil.addIndexesSlowly(w.w, r1, r2);
r1.close();
d1.close();
r2.close();
@@ -1215,8 +1218,7 @@ public class TestAddIndexes extends Luce
public void testAddEmpty() throws Exception {
Directory d1 = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), d1);
- MultiReader empty = new MultiReader();
- w.addIndexes(empty);
+ w.addIndexes(new CodecReader[0]);
w.close();
DirectoryReader dr = DirectoryReader.open(d1);
for (LeafReaderContext ctx : dr.leaves()) {
@@ -1234,11 +1236,11 @@ public class TestAddIndexes extends Luce
Directory src = newDirectory(), dest = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), src);
w.addDocument(w.newDocument());
- IndexReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
+ LeafReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
w.close();
w = new RandomIndexWriter(random(), dest);
- w.addIndexes(allDeletedReader);
+ w.addIndexes(SlowCodecReaderWrapper.wrap(allDeletedReader));
w.close();
DirectoryReader dr = DirectoryReader.open(src);
for (LeafReaderContext ctx : dr.leaves()) {
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java Sun Feb 8 23:53:14 2015
@@ -311,42 +311,6 @@ public class TestBinaryDocValuesUpdates
dir.close();
}
- public void testUpdateAndDeleteSameDocument() throws Exception {
- // update and delete same document in same commit session
- Directory dir = newDirectory();
- IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
- conf.setMaxBufferedDocs(10); // control segment flushing
- IndexWriter writer = new IndexWriter(dir, conf);
- FieldTypes fieldTypes = writer.getFieldTypes();
- fieldTypes.disableSorting("val");
-
- writer.addDocument(doc(writer, 0));
- writer.addDocument(doc(writer, 1));
-
- if (random().nextBoolean()) {
- writer.commit();
- }
-
- writer.deleteDocuments(new Term("id", "doc-0"));
- writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(17L));
-
- final DirectoryReader reader;
- if (random().nextBoolean()) { // not NRT
- writer.close();
- reader = DirectoryReader.open(dir);
- } else { // NRT
- reader = DirectoryReader.open(writer, true);
- writer.close();
- }
-
- LeafReader r = reader.leaves().get(0).reader();
- assertFalse(r.getLiveDocs().get(0));
- assertEquals(1, getValue(r.getBinaryDocValues("val"), 0)); // deletes are currently applied first
-
- reader.close();
- dir.close();
- }
-
public void testMultipleDocValuesTypes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -669,7 +633,7 @@ public class TestBinaryDocValuesUpdates
reader.close();
dir.close();
}
-
+
public void testManyReopensAndFields() throws Exception {
Directory dir = newDirectory();
final Random random = random();
@@ -688,6 +652,7 @@ public class TestBinaryDocValuesUpdates
writer.commit();
reader = DirectoryReader.open(dir);
}
+ //System.out.println("TEST: isNRT=" + isNRT);
final int numFields = random.nextInt(4) + 3; // 3-7
final long[] fieldValues = new long[numFields];
@@ -700,7 +665,7 @@ public class TestBinaryDocValuesUpdates
int docID = 0;
for (int i = 0; i < numRounds; i++) {
int numDocs = atLeast(5);
-// System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+ //System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
for (int j = 0; j < numDocs; j++) {
Document doc = writer.newDocument();
doc.addAtom("id", "doc-" + docID);
@@ -1143,7 +1108,7 @@ public class TestBinaryDocValuesUpdates
writer.addIndexes(dir1);
} else {
DirectoryReader reader = DirectoryReader.open(dir1);
- writer.addIndexes(reader);
+ TestUtil.addIndexesSlowly(writer, reader);
reader.close();
}
writer.close();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Sun Feb 8 23:53:14 2015
@@ -292,7 +292,7 @@ public class TestConcurrentMergeSchedule
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
@Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
try {
// Stall all incoming merges until we see
// maxMergeCount:
@@ -311,7 +311,7 @@ public class TestConcurrentMergeSchedule
// Then sleep a bit to give a chance for the bug
// (too many pending merges) to appear:
Thread.sleep(20);
- super.doMerge(merge);
+ super.doMerge(writer, merge);
} finally {
runningMergeCount.decrementAndGet();
}
@@ -357,10 +357,10 @@ public class TestConcurrentMergeSchedule
}
@Override
- public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
totMergedBytes += merge.totalBytesSize();
atLeastOneMerge.countDown();
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
}
@@ -429,7 +429,7 @@ public class TestConcurrentMergeSchedule
final AtomicInteger runningMergeCount = new AtomicInteger();
@Override
- public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
int count = runningMergeCount.incrementAndGet();
// evil?
synchronized (this) {
@@ -438,7 +438,7 @@ public class TestConcurrentMergeSchedule
}
}
try {
- super.doMerge(merge);
+ super.doMerge(writer, merge);
} finally {
runningMergeCount.decrementAndGet();
}
@@ -461,7 +461,6 @@ public class TestConcurrentMergeSchedule
// No merges should have run so far, because TMP has high segmentsPerTier:
assertEquals(0, maxRunningMergeCount.get());
-
w.forceMerge(1);
// At most 5 merge threads should have launched at once:
@@ -490,8 +489,9 @@ public class TestConcurrentMergeSchedule
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
@Override
- protected void maybeStall() {
+ protected boolean maybeStall(IndexWriter writer) {
wasCalled.set(true);
+ return true;
}
});
IndexWriter w = new IndexWriter(dir, iwc);
@@ -515,14 +515,14 @@ public class TestConcurrentMergeSchedule
final CountDownLatch mergeFinish = new CountDownLatch(1);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
@Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
mergeStart.countDown();
try {
mergeFinish.await();
} catch (InterruptedException ie) {
throw new RuntimeException(ie);
}
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
};
cms.setMaxMergesAndThreads(1, 1);
@@ -630,7 +630,7 @@ public class TestConcurrentMergeSchedule
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
cms.setDefaultMaxMergesAndThreads(true);
assertEquals(1, cms.getMaxThreadCount());
- assertEquals(2, cms.getMaxMergeCount());
+ assertEquals(6, cms.getMaxMergeCount());
}
public void testNonSpinningDefaults() throws Exception {
@@ -638,7 +638,45 @@ public class TestConcurrentMergeSchedule
cms.setDefaultMaxMergesAndThreads(false);
int threadCount = cms.getMaxThreadCount();
assertTrue(threadCount >= 1);
- assertTrue(threadCount <= 3);
- assertEquals(cms.getMaxMergeCount(), 2+threadCount);
+ assertTrue(threadCount <= 4);
+ assertEquals(5+threadCount, cms.getMaxMergeCount());
+ }
+
+ // LUCENE-6197
+ public void testNoStallMergeThreads() throws Exception {
+ MockDirectoryWrapper dir = newMockDirectory();
+
+ IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
+ iwc.setMergePolicy(NoMergePolicy.INSTANCE);
+ iwc.setMaxBufferedDocs(2);
+ IndexWriter w = new IndexWriter(dir, iwc);
+ for(int i=0;i<1000;i++) {
+ Document doc = w.newDocument();
+ doc.addAtom("field", ""+i);
+ w.addDocument(doc);
+ }
+ w.close();
+
+ iwc = newIndexWriterConfig(new MockAnalyzer(random()));
+ AtomicBoolean failed = new AtomicBoolean();
+ ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
+ @Override
+ protected void doStall() {
+ if (Thread.currentThread().getName().startsWith("Lucene Merge Thread")) {
+ failed.set(true);
+ }
+ super.doStall();
+ }
+ };
+ cms.setMaxMergesAndThreads(2, 1);
+ iwc.setMergeScheduler(cms);
+ iwc.setMaxBufferedDocs(2);
+
+ w = new IndexWriter(dir, iwc);
+ w.forceMerge(1);
+ w.close();
+ dir.close();
+
+ assertFalse(failed.get());
}
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDoc.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDoc.java Sun Feb 8 23:53:14 2015
@@ -40,6 +40,7 @@ import org.apache.lucene.index.IndexWrit
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.InfoStream;
@@ -47,235 +48,235 @@ import org.apache.lucene.util.LuceneTest
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;
-
/** JUnit adaptation of an older test case DocTest. */
public class TestDoc extends LuceneTestCase {
- private Path workDir;
- private Path indexDir;
- private LinkedList<Path> files;
-
- /** Set the test case. This test case needs
- * a few text files created in the current working directory.
- */
- @Override
- public void setUp() throws Exception {
- super.setUp();
- if (VERBOSE) {
- System.out.println("TEST: setUp");
- }
- workDir = createTempDir("TestDoc");
- indexDir = createTempDir("testIndex");
+ private Path workDir;
+ private Path indexDir;
+ private LinkedList<Path> files;
+
+ /** Set the test case. This test case needs
+ * a few text files created in the current working directory.
+ */
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ if (VERBOSE) {
+ System.out.println("TEST: setUp");
+ }
+ workDir = createTempDir("TestDoc");
+ indexDir = createTempDir("testIndex");
- Directory directory = newFSDirectory(indexDir);
- directory.close();
+ Directory directory = newFSDirectory(indexDir);
+ directory.close();
- files = new LinkedList<>();
- files.add(createOutput("test.txt",
- "This is the first test file"
- ));
-
- files.add(createOutput("test2.txt",
- "This is the second test file"
- ));
- }
-
- private Path createOutput(String name, String text) throws IOException {
- Writer fw = null;
- PrintWriter pw = null;
-
- try {
- Path path = workDir.resolve(name);
- Files.deleteIfExists(path);
-
- fw = new OutputStreamWriter(Files.newOutputStream(path), StandardCharsets.UTF_8);
- pw = new PrintWriter(fw);
- pw.println(text);
- return path;
-
- } finally {
- if (pw != null) pw.close();
- if (fw != null) fw.close();
- }
+ files = new LinkedList<>();
+ files.add(createOutput("test.txt",
+ "This is the first test file"
+ ));
+
+ files.add(createOutput("test2.txt",
+ "This is the second test file"
+ ));
+ }
+
+ private Path createOutput(String name, String text) throws IOException {
+ Writer fw = null;
+ PrintWriter pw = null;
+
+ try {
+ Path path = workDir.resolve(name);
+ Files.deleteIfExists(path);
+
+ fw = new OutputStreamWriter(Files.newOutputStream(path), StandardCharsets.UTF_8);
+ pw = new PrintWriter(fw);
+ pw.println(text);
+ return path;
+
+ } finally {
+ if (pw != null) pw.close();
+ if (fw != null) fw.close();
}
+ }
- /** This test executes a number of merges and compares the contents of
- * the segments created when using compound file or not using one.
- *
- * TODO: the original test used to print the segment contents to System.out
- * for visual validation. To have the same effect, a new method
- * checkSegment(String name, ...) should be created that would
- * assert various things about the segment.
- */
- public void testIndexAndMerge() throws Exception {
- StringWriter sw = new StringWriter();
- PrintWriter out = new PrintWriter(sw, true);
+ /** This test executes a number of merges and compares the contents of
+ * the segments created when using compound file or not using one.
+ *
+ * TODO: the original test used to print the segment contents to System.out
+ * for visual validation. To have the same effect, a new method
+ * checkSegment(String name, ...) should be created that would
+ * assert various things about the segment.
+ */
+ public void testIndexAndMerge() throws Exception {
+ StringWriter sw = new StringWriter();
+ PrintWriter out = new PrintWriter(sw, true);
- Directory directory = newFSDirectory(indexDir);
+ Directory directory = newFSDirectory(indexDir);
- if (directory instanceof MockDirectoryWrapper) {
- // We create unreferenced files (we don't even write
- // a segments file):
- ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
- // this test itself deletes files (has no retry mechanism)
- ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
- }
+ if (directory instanceof MockDirectoryWrapper) {
+ // We create unreferenced files (we don't even write
+ // a segments file):
+ ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
+ // this test itself deletes files (has no retry mechanism)
+ ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+ }
- IndexWriter writer = new IndexWriter(
- directory,
- newIndexWriterConfig(new MockAnalyzer(random())).
- setOpenMode(OpenMode.CREATE).
- setMaxBufferedDocs(-1).
- setMergePolicy(newLogMergePolicy(10))
- );
+ IndexWriter writer = new IndexWriter(
+ directory,
+ newIndexWriterConfig(new MockAnalyzer(random())).
+ setOpenMode(OpenMode.CREATE).
+ setMaxBufferedDocs(-1).
+ setMergePolicy(newLogMergePolicy(10))
+ );
FieldTypes fieldTypes = writer.getFieldTypes();
fieldTypes.disableExistsFilters();
- SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
- printSegment(out, si1);
+ SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
+ printSegment(out, si1);
- SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
- printSegment(out, si2);
- writer.close();
+ SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
+ printSegment(out, si2);
+ writer.close();
- SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
- printSegment(out, siMerge);
+ SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
+ printSegment(out, siMerge);
- SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", false);
- printSegment(out, siMerge2);
+ SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", false);
+ printSegment(out, siMerge2);
- SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", false);
- printSegment(out, siMerge3);
+ SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", false);
+ printSegment(out, siMerge3);
- directory.close();
- out.close();
- sw.close();
-
- String multiFileOutput = sw.toString();
- //System.out.println(multiFileOutput);
-
- sw = new StringWriter();
- out = new PrintWriter(sw, true);
-
- directory = newFSDirectory(indexDir);
-
- if (directory instanceof MockDirectoryWrapper) {
- // We create unreferenced files (we don't even write
- // a segments file):
- ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
- // this test itself deletes files (has no retry mechanism)
- ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
- }
+ directory.close();
+ out.close();
+ sw.close();
+
+ String multiFileOutput = sw.toString();
+ //System.out.println(multiFileOutput);
+
+ sw = new StringWriter();
+ out = new PrintWriter(sw, true);
+
+ directory = newFSDirectory(indexDir);
+
+ if (directory instanceof MockDirectoryWrapper) {
+ // We create unreferenced files (we don't even write
+ // a segments file):
+ ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
+ // this test itself deletes files (has no retry mechanism)
+ ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+ }
- writer = new IndexWriter(
- directory,
- newIndexWriterConfig(new MockAnalyzer(random())).
- setOpenMode(OpenMode.CREATE).
- setMaxBufferedDocs(-1).
- setMergePolicy(newLogMergePolicy(10))
- );
+ writer = new IndexWriter(
+ directory,
+ newIndexWriterConfig(new MockAnalyzer(random())).
+ setOpenMode(OpenMode.CREATE).
+ setMaxBufferedDocs(-1).
+ setMergePolicy(newLogMergePolicy(10))
+ );
+
+ fieldTypes = writer.getFieldTypes();
+ fieldTypes.disableExistsFilters();
+
+ si1 = indexDoc(writer, "test.txt");
+ printSegment(out, si1);
+
+ si2 = indexDoc(writer, "test2.txt");
+ printSegment(out, si2);
+ writer.close();
- fieldTypes = writer.getFieldTypes();
- fieldTypes.disableExistsFilters();
+ siMerge = merge(directory, si1, si2, "_merge", true);
+ printSegment(out, siMerge);
- si1 = indexDoc(writer, "test.txt");
- printSegment(out, si1);
+ siMerge2 = merge(directory, si1, si2, "_merge2", true);
+ printSegment(out, siMerge2);
- si2 = indexDoc(writer, "test2.txt");
- printSegment(out, si2);
- writer.close();
+ siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", true);
+ printSegment(out, siMerge3);
+
+ directory.close();
+ out.close();
+ sw.close();
+ String singleFileOutput = sw.toString();
+
+ assertEquals(multiFileOutput, singleFileOutput);
+ }
+
+ private SegmentCommitInfo indexDoc(IndexWriter writer, String fileName)
+ throws Exception {
+ Path path = workDir.resolve(fileName);
+ Document doc = writer.newDocument();
+ InputStreamReader is = new InputStreamReader(Files.newInputStream(path), StandardCharsets.UTF_8);
+ doc.addLargeText("contents", is);
+ writer.addDocument(doc);
+ writer.commit();
+ is.close();
+ return writer.newestSegment();
+ }
+
+ private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
+ throws Exception {
+ FieldTypes fieldTypes = FieldTypes.getFieldTypes(dir, null);
+ IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
+ SegmentReader r1 = new SegmentReader(fieldTypes, si1, context);
+ SegmentReader r2 = new SegmentReader(fieldTypes, si2, context);
+
+ final Codec codec = Codec.getDefault();
+ TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
+ final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
+
+ SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.<CodecReader>asList(r1, r2),
+ si, InfoStream.getDefault(), trackingDir,
+ new FieldInfos.FieldNumbers(), context);
+
+ MergeState mergeState = merger.merge();
+ r1.close();
+ r2.close();;
+ si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
+
+ if (useCompoundFile) {
+ Collection<String> filesToDelete = si.files();
+ IndexWriter.createCompoundFile(InfoStream.getDefault(), new TrackingDirectoryWrapper(dir), si, newIOContext(random()));
+ si.setUseCompoundFile(true);
+ for (final String fileToDelete : filesToDelete) {
+ si1.info.dir.deleteFile(fileToDelete);
+ }
+ }
- siMerge = merge(directory, si1, si2, "_merge", true);
- printSegment(out, siMerge);
+ return new SegmentCommitInfo(si, 0, -1L, -1L, -1L);
+ }
- siMerge2 = merge(directory, si1, si2, "_merge2", true);
- printSegment(out, siMerge2);
- siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", true);
- printSegment(out, siMerge3);
-
- directory.close();
- out.close();
- sw.close();
- String singleFileOutput = sw.toString();
-
- assertEquals(multiFileOutput, singleFileOutput);
- }
-
- private SegmentCommitInfo indexDoc(IndexWriter writer, String fileName)
- throws Exception {
- Path path = workDir.resolve(fileName);
- Document doc = writer.newDocument();
- InputStreamReader is = new InputStreamReader(Files.newInputStream(path), StandardCharsets.UTF_8);
- doc.addLargeText("contents", is);
- writer.addDocument(doc);
- writer.commit();
- is.close();
- return writer.newestSegment();
- }
-
- private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
- throws Exception {
- FieldTypes fieldTypes = FieldTypes.getFieldTypes(dir, null);
- IOContext context = newIOContext(random());
- SegmentReader r1 = new SegmentReader(fieldTypes, si1, context);
- SegmentReader r2 = new SegmentReader(fieldTypes, si2, context);
-
- final Codec codec = Codec.getDefault();
- TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
- final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
-
- SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.<LeafReader>asList(r1, r2),
- si, InfoStream.getDefault(), trackingDir,
- MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), context);
-
- MergeState mergeState = merger.merge();
- r1.close();
- r2.close();;
- si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
-
- if (useCompoundFile) {
- Collection<String> filesToDelete = IndexWriter.createCompoundFile(InfoStream.getDefault(), dir, MergeState.CheckAbort.NONE, si, newIOContext(random()));
- si.setUseCompoundFile(true);
- for (final String fileToDelete : filesToDelete) {
- si1.info.dir.deleteFile(fileToDelete);
- }
- }
-
- return new SegmentCommitInfo(si, 0, -1L, -1L, -1L);
- }
-
-
- private void printSegment(PrintWriter out, SegmentCommitInfo si)
- throws Exception {
- SegmentReader reader = new SegmentReader(null, si, newIOContext(random()));
-
- for (int i = 0; i < reader.numDocs(); i++)
- out.println(reader.document(i));
-
- Fields fields = reader.fields();
- for (String field : fields) {
- Terms terms = fields.terms(field);
- assertNotNull(terms);
- TermsEnum tis = terms.iterator(null);
- while(tis.next() != null) {
-
- out.print(" term=" + field + ":" + tis.term());
- out.println(" DF=" + tis.docFreq());
-
- DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);
- while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
- out.print(" doc=" + positions.docID());
- out.print(" TF=" + positions.freq());
- out.print(" pos=");
- out.print(positions.nextPosition());
- for (int j = 1; j < positions.freq(); j++)
- out.print("," + positions.nextPosition());
- out.println("");
- }
+ private void printSegment(PrintWriter out, SegmentCommitInfo si)
+ throws Exception {
+ SegmentReader reader = new SegmentReader(null, si, newIOContext(random()));
+
+ for (int i = 0; i < reader.numDocs(); i++)
+ out.println(reader.document(i));
+
+ Fields fields = reader.fields();
+ for (String field : fields) {
+ Terms terms = fields.terms(field);
+ assertNotNull(terms);
+ TermsEnum tis = terms.iterator(null);
+ while(tis.next() != null) {
+
+ out.print(" term=" + field + ":" + tis.term());
+ out.println(" DF=" + tis.docFreq());
+
+ DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);
+ while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+ out.print(" doc=" + positions.docID());
+ out.print(" TF=" + positions.freq());
+ out.print(" pos=");
+ out.print(positions.nextPosition());
+ for (int j = 1; j < positions.freq(); j++)
+ out.print("," + positions.nextPosition());
+ out.println("");
}
}
- reader.close();
}
+ reader.close();
+ }
}
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java Sun Feb 8 23:53:14 2015
@@ -29,6 +29,7 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
/**
*
@@ -62,7 +63,7 @@ public class TestDocValuesIndexing exten
Directory d3 = newDirectory();
w = new RandomIndexWriter(random(), d3);
- w.addIndexes(SlowCompositeReaderWrapper.wrap(r1), SlowCompositeReaderWrapper.wrap(r2));
+ w.addIndexes(SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r1)), SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r2)));
r1.close();
d1.close();
r2.close();
@@ -519,9 +520,9 @@ public class TestDocValuesIndexing exten
// expected
}
- IndexReader r = DirectoryReader.open(dir2);
+ DirectoryReader r = DirectoryReader.open(dir2);
try {
- w.addIndexes(new IndexReader[] {r});
+ TestUtil.addIndexesSlowly(w, r);
fail("didn't hit expected exception");
} catch (IllegalStateException iae) {
// expected
@@ -685,14 +686,14 @@ public class TestDocValuesIndexing exten
doc = writer.newDocument();
doc.addAtom("dv", new BytesRef("foo"));
writer.addDocument(doc);
- IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
+ DirectoryReader reader = DirectoryReader.open(dir);
try {
- writer.addIndexes(readers);
+ TestUtil.addIndexesSlowly(writer, reader);
fail("did not hit exception");
} catch (IllegalStateException ise) {
// expected
}
- readers[0].close();
+ reader.close();
writer.close();
dir.close();
@@ -715,6 +716,33 @@ public class TestDocValuesIndexing exten
doc = writer.newDocument();
try {
doc.addAtom("dv", new BytesRef("foo"));
+ fail("did not hit exception");
+ } catch (IllegalStateException ise) {
+ // expected
+ }
+ writer.close();
+ dir2.close();
+ dir.close();
+ }
+
+ public void testTypeChangeViaAddIndexesIR2() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
+ IndexWriter writer = new IndexWriter(dir, conf);
+ Document doc = writer.newDocument();
+ doc.addLong("dv", 0L);
+ writer.addDocument(doc);
+ writer.close();
+
+ Directory dir2 = newDirectory();
+ conf = newIndexWriterConfig(new MockAnalyzer(random()));
+ writer = new IndexWriter(dir2, conf);
+ DirectoryReader reader = DirectoryReader.open(dir);
+ TestUtil.addIndexesSlowly(writer, reader);
+ reader.close();
+ doc = writer.newDocument();
+ try {
+ doc.addAtom("dv", new BytesRef("foo"));
fail("did not hit exception");
} catch (IllegalStateException ise) {
// expected
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java Sun Feb 8 23:53:14 2015
@@ -16,6 +16,7 @@ package org.apache.lucene.index;
* License for the specific language governing permissions and limitations under
* the License.
*/
+
import java.lang.reflect.Field;
import java.util.HashSet;
import java.util.Set;
@@ -24,12 +25,14 @@ import java.util.concurrent.atomic.Atomi
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice;
+import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.ThreadInterruptedException;
+
+
/**
* Unit test for {@link DocumentsWriterDeleteQueue}
*/
@@ -75,9 +78,18 @@ public class TestDocumentsWriterDeleteQu
assertEquals(uniqueValues, bd2.terms.keySet());
HashSet<Term> frozenSet = new HashSet<>();
BytesRefBuilder bytesRef = new BytesRefBuilder();
- for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
- bytesRef.copyBytes(t.bytes);
- frozenSet.add(new Term(t.field, bytesRef.toBytesRef()));
+ TermIterator iter = queue.freezeGlobalBuffer(null).termIterator();
+ String field = null;
+ while (true) {
+ boolean newField = iter.next();
+ if (newField) {
+ field = iter.field;
+ if (field == null) {
+ break;
+ }
+ }
+ bytesRef.copyBytes(iter.bytes);
+ frozenSet.add(new Term(field, bytesRef.toBytesRef()));
}
assertEquals(uniqueValues, frozenSet);
assertEquals("num deletes must be 0 after freeze", 0, queue
@@ -204,10 +216,21 @@ public class TestDocumentsWriterDeleteQu
queue.tryApplyGlobalSlice();
Set<Term> frozenSet = new HashSet<>();
BytesRefBuilder builder = new BytesRefBuilder();
- for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
- builder.copyBytes(t.bytes);
- frozenSet.add(new Term(t.field, builder.toBytesRef()));
+
+ TermIterator iter = queue.freezeGlobalBuffer(null).termIterator();
+ String field = null;
+ while (true) {
+ boolean newField = iter.next();
+ if (newField) {
+ field = iter.field;
+ if (field == null) {
+ break;
+ }
+ }
+ builder.copyBytes(iter.bytes);
+ frozenSet.add(new Term(field, builder.toBytesRef()));
}
+
assertEquals("num deletes must be 0 after freeze", 0, queue
.numGlobalTermDeletes());
assertEquals(uniqueValues.size(), frozenSet.size());
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java Sun Feb 8 23:53:14 2015
@@ -33,7 +33,7 @@ import org.apache.lucene.util.ThreadInte
public class TestDocumentsWriterStallControl extends LuceneTestCase {
public void testSimpleStall() throws InterruptedException {
- DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+ DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(newIndexWriterConfig());
ctrl.updateStalled(false);
Thread[] waitThreads = waitThreads(atLeast(1), ctrl);
@@ -55,7 +55,7 @@ public class TestDocumentsWriterStallCon
}
public void testRandom() throws InterruptedException {
- final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+ final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(newIndexWriterConfig());
ctrl.updateStalled(false);
Thread[] stallThreads = new Thread[atLeast(3)];
@@ -96,7 +96,7 @@ public class TestDocumentsWriterStallCon
}
public void testAccquireReleaseRace() throws InterruptedException {
- final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+ final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(newIndexWriterConfig());
ctrl.updateStalled(false);
final AtomicBoolean stop = new AtomicBoolean(false);
final AtomicBoolean checkPoint = new AtomicBoolean(true);
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java Sun Feb 8 23:53:14 2015
@@ -26,6 +26,7 @@ import org.apache.lucene.document.Docume
import org.apache.lucene.document.FieldTypes;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -41,7 +42,9 @@ public class TestDuelingCodecs extends L
private Directory rightDir;
private IndexReader rightReader;
private Codec rightCodec;
-
+ private RandomIndexWriter leftWriter;
+ private RandomIndexWriter rightWriter;
+ private long seed;
private String info; // for debugging
@Override
@@ -55,10 +58,10 @@ public class TestDuelingCodecs extends L
leftCodec = Codec.forName("SimpleText");
rightCodec = new RandomCodec(random());
- leftDir = newDirectory();
- rightDir = newDirectory();
+ leftDir = newFSDirectory(createTempDir("leftDir"));
+ rightDir = newFSDirectory(createTempDir("rightDir"));
- long seed = random().nextLong();
+ seed = random().nextLong();
// must use same seed because of random payloads, etc
int maxTermLength = TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH);
@@ -80,44 +83,23 @@ public class TestDuelingCodecs extends L
rightConfig.setMergePolicy(newLogMergePolicy());
// must use same seed because of random docvalues fields, etc
- RandomIndexWriter leftWriter = new RandomIndexWriter(new Random(seed), leftDir, leftConfig);
- RandomIndexWriter rightWriter = new RandomIndexWriter(new Random(seed), rightDir, rightConfig);
-
- int numdocs = atLeast(100);
- createRandomIndex(numdocs, leftWriter, seed);
- createRandomIndex(numdocs, rightWriter, seed);
+ leftWriter = new RandomIndexWriter(new Random(seed), leftDir, leftConfig);
+ rightWriter = new RandomIndexWriter(new Random(seed), rightDir, rightConfig);
- leftReader = maybeWrapReader(leftWriter.getReader());
- leftWriter.close();
- rightReader = maybeWrapReader(rightWriter.getReader());
- rightWriter.close();
-
- // check that our readers are valid
- TestUtil.checkReader(leftReader);
- TestUtil.checkReader(rightReader);
-
info = "left: " + leftCodec.toString() + " / right: " + rightCodec.toString();
}
@Override
public void tearDown() throws Exception {
- if (leftReader != null) {
- leftReader.close();
- }
- if (rightReader != null) {
- rightReader.close();
- }
-
- if (leftDir != null) {
- leftDir.close();
- }
- if (rightDir != null) {
- rightDir.close();
- }
-
+ IOUtils.close(leftWriter,
+ rightWriter,
+ leftReader,
+ rightReader,
+ leftDir,
+ rightDir);
super.tearDown();
}
-
+
/**
 * populates a writer with random stuff. this must be fully reproducible with the seed!
*/
@@ -160,7 +142,28 @@ public class TestDuelingCodecs extends L
* checks the two indexes are equivalent
*/
public void testEquals() throws IOException {
+ int numdocs = TEST_NIGHTLY ? atLeast(2000) : atLeast(100);
+ createRandomIndex(numdocs, leftWriter, seed);
+ createRandomIndex(numdocs, rightWriter, seed);
+
+ leftReader = leftWriter.getReader();
+ rightReader = rightWriter.getReader();
+
assertReaderEquals(info, leftReader, rightReader);
}
+ public void testCrazyReaderEquals() throws IOException {
+ int numdocs = atLeast(100);
+ createRandomIndex(numdocs, leftWriter, seed);
+ createRandomIndex(numdocs, rightWriter, seed);
+
+ leftReader = wrapReader(leftWriter.getReader());
+ rightReader = wrapReader(rightWriter.getReader());
+
+ // check that our readers are valid
+ TestUtil.checkReader(leftReader);
+ TestUtil.checkReader(rightReader);
+
+ assertReaderEquals(info, leftReader, rightReader);
+ }
}
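
The tearDown above replaces four null-checked close blocks with a single IOUtils.close call. A minimal sketch of that pattern, assuming the usual org.apache.lucene.util.IOUtils.close(Closeable...) behavior (null arguments are skipped, the rest are closed in order, and the first exception is rethrown):

  @Override
  public void tearDown() throws Exception {
    // One call closes writers, readers and directories; nulls (e.g. a reader
    // a test never opened) are simply ignored.
    IOUtils.close(leftWriter, rightWriter, leftReader, rightReader, leftDir, rightDir);
    super.tearDown();
  }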
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java Sun Feb 8 23:53:14 2015
@@ -137,11 +137,11 @@ public class TestFilterLeafReader extend
((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
- IndexReader reader = new TestReader(DirectoryReader.open(directory));
- writer.addIndexes(reader);
+ try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) {
+ writer.addIndexes(SlowCodecReaderWrapper.wrap(reader));
+ }
writer.close();
- reader.close();
- reader = DirectoryReader.open(target);
+ IndexReader reader = DirectoryReader.open(target);
TermsEnum terms = MultiFields.getTerms(reader, "default").iterator(null);
while (terms.next() != null) {
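
On this branch addIndexes apparently takes CodecReaders rather than arbitrary IndexReaders, which is why the test wraps its filtered LeafReader before handing it to the writer. A minimal sketch of that pattern, using the same SlowCodecReaderWrapper.wrap call as the hunk (try-with-resources keeps the source reader closed even if addIndexes throws):

  try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) {
    // Adapt the LeafReader to the CodecReader interface addIndexes expects.
    writer.addIndexes(SlowCodecReaderWrapper.wrap(reader));
  }
  writer.close();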
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Sun Feb 8 23:53:14 2015
@@ -22,6 +22,7 @@ import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.AlreadyClosedException;
@@ -114,7 +115,7 @@ public class TestIndexFileDeleter extend
// non-existent segment:
copyFile(dir, "_0_1" + ext, "_188_1" + ext);
- String cfsFiles0[] = si0.getCodec().compoundFormat().files(si0);
+ String cfsFiles0[] = si0.getCodec() instanceof SimpleTextCodec ? new String[] { "_0.scf" } : new String[] { "_0.cfs", "_0.cfe" };
// Create a bogus segment file:
copyFile(dir, cfsFiles0[0], "_188.cfs");
@@ -127,12 +128,12 @@ public class TestIndexFileDeleter extend
// TODO: assert is bogus (relies upon codec-specific filenames)
assertTrue(slowFileExists(dir, "_3.fdt") || slowFileExists(dir, "_3.fld"));
- String cfsFiles3[] = si3.getCodec().compoundFormat().files(si3);
+ String cfsFiles3[] = si3.getCodec() instanceof SimpleTextCodec ? new String[] { "_3.scf" } : new String[] { "_3.cfs", "_3.cfe" };
for (String f : cfsFiles3) {
assertTrue(!slowFileExists(dir, f));
}
- String cfsFiles1[] = si1.getCodec().compoundFormat().files(si1);
+ String cfsFiles1[] = si1.getCodec() instanceof SimpleTextCodec ? new String[] { "_1.scf" } : new String[] { "_1.cfs", "_1.cfe" };
copyFile(dir, cfsFiles1[0], "_3.cfs");
String[] filesPre = dir.listAll();
@@ -430,7 +431,7 @@ public class TestIndexFileDeleter extend
if (ms instanceof ConcurrentMergeScheduler) {
final ConcurrentMergeScheduler suppressFakeFail = new ConcurrentMergeScheduler() {
@Override
- protected void handleMergeException(Throwable exc) {
+ protected void handleMergeException(Directory dir, Throwable exc) {
// suppress only FakeIOException:
if (exc instanceof RuntimeException && exc.getMessage().equals("fake fail")) {
// ok to ignore
@@ -438,13 +439,12 @@ public class TestIndexFileDeleter extend
&& exc.getCause() != null && "fake fail".equals(exc.getCause().getMessage())) {
// also ok to ignore
} else {
- super.handleMergeException(exc);
+ super.handleMergeException(dir, exc);
}
}
};
final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) ms;
suppressFakeFail.setMaxMergesAndThreads(cms.getMaxMergeCount(), cms.getMaxThreadCount());
- suppressFakeFail.setMergeThreadPriority(cms.getMergeThreadPriority());
iwc.setMergeScheduler(suppressFakeFail);
}
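
This commit repeatedly updates anonymous ConcurrentMergeScheduler subclasses: handleMergeException now also receives the Directory, and the merge-thread-priority calls are gone, leaving only setMaxMergesAndThreads to copy over. A minimal sketch of a scheduler that suppresses one simulated failure, following the hunks above (maxMergeCount and maxThreadCount are illustrative values):

  ConcurrentMergeScheduler suppress = new ConcurrentMergeScheduler() {
    @Override
    protected void handleMergeException(Directory dir, Throwable exc) {
      // Only the injected test failure is ignored; anything else still escalates.
      if (!(exc instanceof MockDirectoryWrapper.FakeIOException)) {
        super.handleMergeException(dir, exc);
      }
    }
  };
  suppress.setMaxMergesAndThreads(maxMergeCount, maxThreadCount);
  iwc.setMergeScheduler(suppress);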
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java Sun Feb 8 23:53:14 2015
@@ -767,8 +767,8 @@ public class TestIndexWriter extends Luc
writer2.commit();
writer2.close();
- IndexReader r1 = DirectoryReader.open(dir2);
- writer.addIndexes(r1, r1);
+ DirectoryReader r1 = DirectoryReader.open(dir2);
+ TestUtil.addIndexesSlowly(writer, r1, r1);
writer.close();
IndexReader r3 = DirectoryReader.open(dir);
@@ -2277,7 +2277,7 @@ public class TestIndexWriter extends Luc
iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
@Override
- public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
mergeStarted.countDown();
try {
closeStarted.await();
@@ -2285,7 +2285,7 @@ public class TestIndexWriter extends Luc
Thread.currentThread().interrupt();
throw new RuntimeException(ie);
}
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
@Override
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Sun Feb 8 23:53:14 2015
@@ -799,7 +799,7 @@ public class TestIndexWriterDelete exten
doc.addLargeText("city", text[i]);
modifier.addDocument(doc);
}
- // flush (and commit if ac)
+ // flush
if (VERBOSE) {
System.out.println("TEST: now full merge");
@@ -828,7 +828,7 @@ public class TestIndexWriterDelete exten
modifier.deleteDocuments(term);
- // add a doc (needed for the !ac case; see below)
+ // add a doc
// doc remains buffered
if (VERBOSE) {
@@ -1250,4 +1250,122 @@ public class TestIndexWriterDelete exten
r.close();
d.close();
}
+
+ public void testOnlyDeletesTriggersMergeOnClose() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ iwc.setMaxBufferedDocs(2);
+ LogDocMergePolicy mp = new LogDocMergePolicy();
+ mp.setMinMergeDocs(1);
+ iwc.setMergePolicy(mp);
+ iwc.setMergeScheduler(new SerialMergeScheduler());
+ IndexWriter w = new IndexWriter(dir, iwc);
+ for(int i=0;i<38;i++) {
+ Document doc = w.newDocument();
+ doc.addAtom("id", ""+i);
+ w.addDocument(doc);
+ }
+ w.commit();
+
+ for(int i=0;i<18;i++) {
+ w.deleteDocuments(new Term("id", ""+i));
+ }
+
+ w.close();
+ DirectoryReader r = DirectoryReader.open(dir);
+ assertEquals(1, r.leaves().size());
+ r.close();
+
+ dir.close();
+ }
+
+ public void testOnlyDeletesTriggersMergeOnGetReader() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ iwc.setMaxBufferedDocs(2);
+ LogDocMergePolicy mp = new LogDocMergePolicy();
+ mp.setMinMergeDocs(1);
+ iwc.setMergePolicy(mp);
+ iwc.setMergeScheduler(new SerialMergeScheduler());
+ IndexWriter w = new IndexWriter(dir, iwc);
+ for(int i=0;i<38;i++) {
+ Document doc = w.newDocument();
+ doc.addAtom("id", ""+i);
+ w.addDocument(doc);
+ }
+ w.commit();
+
+ for(int i=0;i<18;i++) {
+ w.deleteDocuments(new Term("id", ""+i));
+ }
+
+ // First one triggers, but does not reflect, the merge:
+ DirectoryReader.open(w, true).close();
+ IndexReader r = DirectoryReader.open(w, true);
+ assertEquals(1, r.leaves().size());
+ r.close();
+
+ w.close();
+ dir.close();
+ }
+
+ public void testOnlyDeletesTriggersMergeOnFlush() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ iwc.setMaxBufferedDocs(2);
+ LogDocMergePolicy mp = new LogDocMergePolicy();
+ mp.setMinMergeDocs(1);
+ iwc.setMergePolicy(mp);
+ iwc.setMergeScheduler(new SerialMergeScheduler());
+ iwc.setMaxBufferedDeleteTerms(18);
+ IndexWriter w = new IndexWriter(dir, iwc);
+ for(int i=0;i<38;i++) {
+ Document doc = w.newDocument();
+ doc.addAtom("id", ""+i);
+ w.addDocument(doc);
+ }
+ w.commit();
+
+ for(int i=0;i<18;i++) {
+ w.deleteDocuments(new Term("id", ""+i));
+ }
+ w.commit();
+
+ DirectoryReader r = DirectoryReader.open(dir);
+ assertEquals(1, r.leaves().size());
+ r.close();
+
+ w.close();
+ dir.close();
+ }
+
+ public void testOnlyDeletesDeleteAllDocs() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ iwc.setMaxBufferedDocs(2);
+ LogDocMergePolicy mp = new LogDocMergePolicy();
+ mp.setMinMergeDocs(1);
+ iwc.setMergePolicy(mp);
+ iwc.setMergeScheduler(new SerialMergeScheduler());
+ iwc.setMaxBufferedDeleteTerms(18);
+ IndexWriter w = new IndexWriter(dir, iwc);
+ for(int i=0;i<38;i++) {
+ Document doc = w.newDocument();
+ doc.addAtom("id", ""+i);
+ w.addDocument(doc);
+ }
+ w.commit();
+
+ for(int i=0;i<38;i++) {
+ w.deleteDocuments(new Term("id", ""+i));
+ }
+
+ DirectoryReader r = DirectoryReader.open(w, true);
+ assertEquals(0, r.leaves().size());
+ assertEquals(0, r.maxDoc());
+ r.close();
+
+ w.close();
+ dir.close();
+ }
}
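
All four new tests above use the same writer configuration: tiny segments (maxBufferedDocs=2), a LogDocMergePolicy whose minMergeDocs is 1, and a SerialMergeScheduler, so that segments whose only pending changes are deletions still get merged when the writer closes, flushes, or hands out a reader. The shared setup, extracted from the tests above:

  IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
  iwc.setMaxBufferedDocs(2);                        // flush after every two docs
  LogDocMergePolicy mp = new LogDocMergePolicy();
  mp.setMinMergeDocs(1);                            // even single-doc segments are merge candidates
  iwc.setMergePolicy(mp);
  iwc.setMergeScheduler(new SerialMergeScheduler());
  IndexWriter w = new IndexWriter(dir, iwc);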
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java Sun Feb 8 23:53:14 2015
@@ -23,6 +23,7 @@ import java.io.StringReader;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
@@ -48,15 +49,15 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
+import org.apache.lucene.util.TestUtil;
@SuppressCodecs("SimpleText") // too slow here
public class TestIndexWriterExceptions extends LuceneTestCase {
@@ -360,12 +361,19 @@ public class TestIndexWriterExceptions e
// LUCENE-1208
public void testExceptionJustBeforeFlush() throws IOException {
Directory dir = newDirectory();
+
+ final AtomicBoolean doCrash = new AtomicBoolean();
+
Analyzer analyzer = new Analyzer(Analyzer.PER_FIELD_REUSE_STRATEGY) {
@Override
public TokenStreamComponents createComponents(String fieldName) {
MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
- return new TokenStreamComponents(tokenizer, new CrashingFilter(fieldName, tokenizer));
+ TokenStream stream = tokenizer;
+ if (doCrash.get()) {
+ stream = new CrashingFilter(fieldName, stream);
+ }
+ return new TokenStreamComponents(tokenizer, stream);
}
};
@@ -379,6 +387,7 @@ public class TestIndexWriterExceptions e
Document crashDoc = w.newDocument();
crashDoc.addLargeText("crash", "do it on token 4");
+ doCrash.set(true);
try {
w.addDocument(crashDoc);
fail("did not hit expected exception");
@@ -1184,7 +1193,7 @@ public class TestIndexWriterExceptions e
dir.close();
}
- // Simulate a corrupt index by removing one of the cfs
+ // Simulate a corrupt index by removing one of the
// files and make sure we get an IOException trying to
// open the index:
public void testSimulatedCorruptIndex2() throws IOException {
@@ -1222,8 +1231,9 @@ public class TestIndexWriterExceptions e
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
for (SegmentCommitInfo si : sis) {
assertTrue(si.info.getUseCompoundFile());
- String cfsFiles[] = si.info.getCodec().compoundFormat().files(si.info);
- dir.deleteFile(cfsFiles[0]);
+ List<String> victims = new ArrayList<String>(si.info.files());
+ Collections.shuffle(victims, random());
+ dir.deleteFile(victims.get(0));
corrupted = true;
break;
}
@@ -1859,16 +1869,15 @@ public class TestIndexWriterExceptions e
if (ms instanceof ConcurrentMergeScheduler) {
final ConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeScheduler() {
@Override
- protected void handleMergeException(Throwable exc) {
+ protected void handleMergeException(Directory dir, Throwable exc) {
// suppress only FakeIOException:
if (!(exc instanceof FakeIOException)) {
- super.handleMergeException(exc);
+ super.handleMergeException(dir, exc);
}
}
};
final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) ms;
suppressFakeIOE.setMaxMergesAndThreads(cms.getMaxMergeCount(), cms.getMaxThreadCount());
- suppressFakeIOE.setMergeThreadPriority(cms.getMergeThreadPriority());
iwc.setMergeScheduler(suppressFakeIOE);
}
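
Earlier in this file the crashing analyzer is reworked so the failure is armed explicitly: the CrashingFilter is only inserted into the chain once an AtomicBoolean is set, letting the preceding documents index normally. A minimal sketch of that conditional token chain, assuming the CrashingFilter test helper used above:

  final AtomicBoolean doCrash = new AtomicBoolean();
  Analyzer analyzer = new Analyzer(Analyzer.PER_FIELD_REUSE_STRATEGY) {
    @Override
    public TokenStreamComponents createComponents(String fieldName) {
      MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      tokenizer.setEnableChecks(false);             // streams are closed forcefully on failure
      TokenStream stream = tokenizer;
      if (doCrash.get()) {
        stream = new CrashingFilter(fieldName, stream);   // fail only once armed
      }
      return new TokenStreamComponents(tokenizer, stream);
    }
  };
  // ... add well-formed documents, then arm the failure before the crash doc:
  doCrash.set(true);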
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java Sun Feb 8 23:53:14 2015
@@ -30,7 +30,9 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
+
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
@SuppressCodecs({ "SimpleText", "Memory", "Direct" })
@@ -278,9 +280,9 @@ public class TestIndexWriterMaxDocs exte
// expected
}
assertEquals(1, w2.maxDoc());
- IndexReader ir = DirectoryReader.open(dir);
+ DirectoryReader ir = DirectoryReader.open(dir);
try {
- w2.addIndexes(new IndexReader[] {ir});
+ TestUtil.addIndexesSlowly(w2, ir);
fail("didn't hit exception");
} catch (IllegalStateException ise) {
// expected
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java Sun Feb 8 23:53:14 2015
@@ -319,12 +319,12 @@ public class TestIndexWriterOnDiskFull e
}
writer.forceMerge(1);
} else if (1 == method) {
- IndexReader readers[] = new IndexReader[dirs.length];
+ DirectoryReader readers[] = new DirectoryReader[dirs.length];
for(int i=0;i<dirs.length;i++) {
readers[i] = DirectoryReader.open(dirs[i]);
}
try {
- writer.addIndexes(readers);
+ TestUtil.addIndexesSlowly(writer, readers);
} finally {
for(int i=0;i<dirs.length;i++) {
readers[i].close();
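
Call sites throughout this commit switch from writer.addIndexes(IndexReader...) to TestUtil.addIndexesSlowly(writer, readers). The helper's body is not part of this diff; presumably it adapts each leaf of the given DirectoryReaders to a CodecReader and forwards them, roughly along these lines (a hypothetical sketch, not the committed implementation):

  // Hypothetical helper -- the real TestUtil.addIndexesSlowly is not shown in this commit.
  static void addIndexesSlowly(IndexWriter writer, DirectoryReader... readers) throws IOException {
    List<CodecReader> leaves = new ArrayList<>();
    for (DirectoryReader reader : readers) {
      for (LeafReaderContext ctx : reader.leaves()) {
        leaves.add(SlowCodecReaderWrapper.wrap(ctx.reader()));
      }
    }
    writer.addIndexes(leaves.toArray(new CodecReader[0]));
  }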
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java Sun Feb 8 23:53:14 2015
@@ -39,7 +39,7 @@ public class TestIndexWriterOutOfFileDes
dir.setRandomIOExceptionRateOnOpen(rate);
int iters = atLeast(20);
LineFileDocs docs = null;
- IndexReader r = null;
+ DirectoryReader r = null;
DirectoryReader r2 = null;
boolean any = false;
MockDirectoryWrapper dirCopy = null;
@@ -69,9 +69,9 @@ public class TestIndexWriterOutOfFileDes
if (r != null && random().nextInt(5) == 3) {
if (random().nextBoolean()) {
if (VERBOSE) {
- System.out.println("TEST: addIndexes IR[]");
+ System.out.println("TEST: addIndexes LR[]");
}
- w.addIndexes(new IndexReader[] {r});
+ TestUtil.addIndexesSlowly(w, r);
} else {
if (VERBOSE) {
System.out.println("TEST: addIndexes Directory[]");
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java Sun Feb 8 23:53:14 2015
@@ -398,7 +398,7 @@ public class TestIndexWriterReader exten
final Thread[] threads = new Thread[numThreads];
IndexWriter mainWriter;
final List<Throwable> failures = new ArrayList<>();
- IndexReader[] readers;
+ DirectoryReader[] readers;
boolean didClose = false;
AtomicInteger count = new AtomicInteger(0);
AtomicInteger numaddIndexes = new AtomicInteger(0);
@@ -416,7 +416,7 @@ public class TestIndexWriterReader exten
writer.close();
- readers = new IndexReader[numDirs];
+ readers = new DirectoryReader[numDirs];
for (int i = 0; i < numDirs; i++) {
readers[i] = DirectoryReader.open(addDir);
}
@@ -499,7 +499,7 @@ public class TestIndexWriterReader exten
numaddIndexes.incrementAndGet();
break;
case 2:
- mainWriter.addIndexes(readers);
+ TestUtil.addIndexesSlowly(mainWriter, readers);
break;
case 3:
mainWriter.commit();
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java?rev=1658277&r1=1658276&r2=1658277&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java Sun Feb 8 23:53:14 2015
@@ -630,9 +630,7 @@ public class TestIndexWriterWithThreads
writerRef.get().prepareCommit();
}
writerRef.get().commit();
- } catch (AlreadyClosedException ace) {
- // ok
- } catch (NullPointerException npe) {
+ } catch (AlreadyClosedException | NullPointerException ace) {
// ok
} finally {
commitLock.unlock();
@@ -644,11 +642,7 @@ public class TestIndexWriterWithThreads
}
try {
writerRef.get().addDocument(docs.nextDoc());
- } catch (AlreadyClosedException ace) {
- // ok
- } catch (NullPointerException npe) {
- // ok
- } catch (AssertionError ae) {
+ } catch (AlreadyClosedException | NullPointerException | AssertionError ace) {
// ok
}
break;