You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by mi...@apache.org on 2011/01/05 18:34:00 UTC
svn commit: r1055547 [2/3] - in /lucene/dev/branches/branch_3x: ./ lucene/
lucene/backwards/src/test/org/apache/lucene/index/
lucene/contrib/ant/src/java/org/apache/lucene/ant/
lucene/contrib/wordnet/src/java/org/apache/lucene/wordnet/
lucene/src/java/...
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/InvertedDocEndConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/InvertedDocEndConsumer.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/InvertedDocEndConsumer.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/InvertedDocEndConsumer.java Wed Jan 5 17:33:58 2011
@@ -24,7 +24,6 @@ import java.io.IOException;
abstract class InvertedDocEndConsumer {
abstract InvertedDocEndConsumerPerThread addThread(DocInverterPerThread docInverterPerThread);
abstract void flush(Map<InvertedDocEndConsumerPerThread,Collection<InvertedDocEndConsumerPerField>> threadsAndFields, SegmentWriteState state) throws IOException;
- abstract void closeDocStore(SegmentWriteState state) throws IOException;
abstract void abort();
abstract void setFieldInfos(FieldInfos fieldInfos);
}
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java Wed Jan 5 17:33:58 2011
@@ -70,7 +70,6 @@ public abstract class LogMergePolicy ext
protected boolean calibrateSizeByDeletes = true;
protected boolean useCompoundFile = true;
- protected boolean useCompoundDocStore = true;
public LogMergePolicy() {
super();
@@ -157,27 +156,6 @@ public abstract class LogMergePolicy ext
return useCompoundFile;
}
- // Javadoc inherited
- @Override
- public boolean useCompoundDocStore(SegmentInfos infos) {
- return useCompoundDocStore;
- }
-
- /** Sets whether compound file format should be used for
- * newly flushed and newly merged doc store
- * segment files (term vectors and stored fields). */
- public void setUseCompoundDocStore(boolean useCompoundDocStore) {
- this.useCompoundDocStore = useCompoundDocStore;
- }
-
- /** Returns true if newly flushed and newly merge doc
- * store segment files (term vectors and stored fields)
- * are written in compound file format. @see
- * #setUseCompoundDocStore */
- public boolean getUseCompoundDocStore() {
- return useCompoundDocStore;
- }
-
/** Sets whether the segment size should be calibrated by
* the number of deletes when choosing segments for merge. */
public void setCalibrateSizeByDeletes(boolean calibrateSizeByDeletes) {
@@ -594,8 +572,7 @@ public abstract class LogMergePolicy ext
sb.append("maxMergeSize=").append(maxMergeSize).append(", ");
sb.append("calibrateSizeByDeletes=").append(calibrateSizeByDeletes).append(", ");
sb.append("maxMergeDocs=").append(maxMergeDocs).append(", ");
- sb.append("useCompoundFile=").append(useCompoundFile).append(", ");
- sb.append("useCompoundDocStore=").append(useCompoundDocStore);
+ sb.append("useCompoundFile=").append(useCompoundFile);
sb.append("]");
return sb.toString();
}
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java Wed Jan 5 17:33:58 2011
@@ -67,7 +67,6 @@ public abstract class MergePolicy implem
public static class OneMerge {
SegmentInfo info; // used by IndexWriter
- boolean mergeDocStores; // used by IndexWriter
boolean optimize; // used by IndexWriter
boolean registerDone; // used by IndexWriter
long mergeGen; // used by IndexWriter
@@ -153,9 +152,6 @@ public abstract class MergePolicy implem
b.append(" into ").append(info.name);
if (optimize)
b.append(" [optimize]");
- if (mergeDocStores) {
- b.append(" [mergeDocStores]");
- }
if (aborted) {
b.append(" [ABORTED]");
}
@@ -316,10 +312,4 @@ public abstract class MergePolicy implem
/** Returns true if a new segment (regardless of its origin) should use the compound file format. */
public abstract boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) throws IOException;
-
- /**
- * Returns true if the doc store files should use the
- * compound file format.
- */
- public abstract boolean useCompoundDocStore(SegmentInfos segments);
}
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java Wed Jan 5 17:33:58 2011
@@ -67,9 +67,6 @@ public final class NoMergePolicy extends
throws CorruptIndexException, IOException { return null; }
@Override
- public boolean useCompoundDocStore(SegmentInfos segments) { return useCompoundFile; }
-
- @Override
public boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) { return useCompoundFile; }
@Override
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NormsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NormsWriter.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NormsWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/NormsWriter.java Wed Jan 5 17:33:58 2011
@@ -171,7 +171,4 @@ final class NormsWriter extends Inverted
normsOut.close();
}
}
-
- @Override
- void closeDocStore(SegmentWriteState state) {}
}
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentInfo.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentInfo.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentInfo.java Wed Jan 5 17:33:58 2011
@@ -93,8 +93,7 @@ public final class SegmentInfo {
private Map<String,String> diagnostics;
public SegmentInfo(String name, int docCount, Directory dir, boolean isCompoundFile, boolean hasSingleNormFile,
- int docStoreOffset, String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx,
- boolean hasVectors) {
+ boolean hasProx, boolean hasVectors) {
this.name = name;
this.docCount = docCount;
this.dir = dir;
@@ -102,13 +101,10 @@ public final class SegmentInfo {
this.isCompoundFile = (byte) (isCompoundFile ? YES : NO);
preLockless = false;
this.hasSingleNormFile = hasSingleNormFile;
- this.docStoreOffset = docStoreOffset;
- this.docStoreSegment = docStoreSegment;
- this.docStoreIsCompoundFile = docStoreIsCompoundFile;
+ this.docStoreOffset = -1;
delCount = 0;
this.hasProx = hasProx;
this.hasVectors = hasVectors;
- assert docStoreOffset == -1 || docStoreSegment != null: "dso=" + docStoreOffset + " dss=" + docStoreSegment + " docCount=" + docCount;
}
/**
@@ -342,8 +338,10 @@ public final class SegmentInfo {
@Override
public Object clone() {
SegmentInfo si = new SegmentInfo(name, docCount, dir, false, hasSingleNormFile,
- docStoreOffset, docStoreSegment, docStoreIsCompoundFile,
hasProx, hasVectors);
+ si.docStoreOffset = docStoreOffset;
+ si.docStoreSegment = docStoreSegment;
+ si.docStoreIsCompoundFile = docStoreIsCompoundFile;
si.delGen = delGen;
si.delCount = delCount;
si.preLockless = preLockless;
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentMerger.java Wed Jan 5 17:33:58 2011
@@ -58,16 +58,12 @@ final class SegmentMerger {
private final CheckAbort checkAbort;
- // Whether we should merge doc stores (stored fields and
- // vectors files). When all segments we are merging
- // already share the same doc store files, we don't need
- // to merge the doc stores.
- private boolean mergeDocStores;
-
/** Maximum number of contiguous documents to bulk-copy
when merging stored fields */
private final static int MAX_RAW_MERGE_DOCS = 4192;
+ private SegmentWriteState segmentWriteState;
+
private final PayloadProcessorProvider payloadProcessorProvider;
SegmentMerger(Directory dir, int termIndexInterval, String name, MergePolicy.OneMerge merge, PayloadProcessorProvider payloadProcessorProvider, FieldInfos fieldInfos) {
@@ -107,23 +103,6 @@ final class SegmentMerger {
* @throws IOException if there is a low-level IO error
*/
final int merge() throws CorruptIndexException, IOException {
- return merge(true);
- }
-
- /**
- * Merges the readers specified by the {@link #add} method
- * into the directory passed to the constructor.
- * @param mergeDocStores if false, we will not merge the
- * stored fields nor vectors files
- * payloads before they are written
- * @return The number of documents that were merged
- * @throws CorruptIndexException if the index is corrupt
- * @throws IOException if there is a low-level IO error
- */
- final int merge(boolean mergeDocStores) throws CorruptIndexException, IOException {
-
- this.mergeDocStores = mergeDocStores;
-
// NOTE: it's important to add calls to
// checkAbort.work(...) if you make any changes to this
// method that will spend alot of time. The frequency
@@ -135,9 +114,8 @@ final class SegmentMerger {
mergeTerms();
mergeNorms();
- if (mergeDocStores && fieldInfos.hasVectors()) {
+ if (fieldInfos.hasVectors())
mergeVectors();
- }
return mergedDocs;
}
@@ -150,9 +128,7 @@ final class SegmentMerger {
if (ext.equals(IndexFileNames.PROX_EXTENSION) && !fieldInfos.hasProx())
continue;
- if (mergeDocStores || (!ext.equals(IndexFileNames.FIELDS_EXTENSION) &&
- !ext.equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
- fileSet.add(IndexFileNames.segmentFileName(segment, ext));
+ fileSet.add(IndexFileNames.segmentFileName(segment, ext));
}
// Fieldable norm files
@@ -166,7 +142,7 @@ final class SegmentMerger {
}
// Vector files
- if (fieldInfos.hasVectors() && mergeDocStores) {
+ if (fieldInfos.hasVectors()) {
for (String ext : IndexFileNames.VECTOR_EXTENSIONS) {
fileSet.add(IndexFileNames.segmentFileName(segment, ext));
}
@@ -278,51 +254,43 @@ final class SegmentMerger {
setMatchingSegmentReaders();
- if (mergeDocStores) {
- // merge field values
- final FieldsWriter fieldsWriter = new FieldsWriter(directory, segment, fieldInfos);
-
- try {
- int idx = 0;
- for (IndexReader reader : readers) {
- final SegmentReader matchingSegmentReader = matchingSegmentReaders[idx++];
- FieldsReader matchingFieldsReader = null;
- if (matchingSegmentReader != null) {
- final FieldsReader fieldsReader = matchingSegmentReader.getFieldsReader();
- if (fieldsReader != null && fieldsReader.canReadRawDocs()) {
- matchingFieldsReader = fieldsReader;
- }
+ final FieldsWriter fieldsWriter = new FieldsWriter(directory, segment, fieldInfos);
+
+ try {
+ int idx = 0;
+ for (IndexReader reader : readers) {
+ final SegmentReader matchingSegmentReader = matchingSegmentReaders[idx++];
+ FieldsReader matchingFieldsReader = null;
+ if (matchingSegmentReader != null) {
+ final FieldsReader fieldsReader = matchingSegmentReader.getFieldsReader();
+ if (fieldsReader != null && fieldsReader.canReadRawDocs()) {
+ matchingFieldsReader = fieldsReader;
}
- if (reader.hasDeletions()) {
- docCount += copyFieldsWithDeletions(fieldsWriter,
- reader, matchingFieldsReader);
- } else {
- docCount += copyFieldsNoDeletions(fieldsWriter,
+ }
+ if (reader.hasDeletions()) {
+ docCount += copyFieldsWithDeletions(fieldsWriter,
reader, matchingFieldsReader);
- }
+ } else {
+ docCount += copyFieldsNoDeletions(fieldsWriter,
+ reader, matchingFieldsReader);
}
- } finally {
- fieldsWriter.close();
}
+ } finally {
+ fieldsWriter.close();
+ }
- final String fileName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION);
- final long fdxFileLength = directory.fileLength(fileName);
+ final String fileName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION);
+ final long fdxFileLength = directory.fileLength(fileName);
- if (4+((long) docCount)*8 != fdxFileLength)
- // This is most likely a bug in Sun JRE 1.6.0_04/_05;
- // we detect that the bug has struck, here, and
- // throw an exception to prevent the corruption from
- // entering the index. See LUCENE-1282 for
- // details.
- throw new RuntimeException("mergeFields produced an invalid result: docCount is " + docCount + " but fdx file size is " + fdxFileLength + " file=" + fileName + " file exists?=" + directory.fileExists(fileName) + "; now aborting this merge to prevent index corruption");
-
- } else
- // If we are skipping the doc stores, that means there
- // are no deletions in any of these segments, so we
- // just sum numDocs() of each segment to get total docCount
- for (final IndexReader reader : readers) {
- docCount += reader.numDocs();
- }
+ if (4+((long) docCount)*8 != fdxFileLength)
+ // This is most likely a bug in Sun JRE 1.6.0_04/_05;
+ // we detect that the bug has struck, here, and
+ // throw an exception to prevent the corruption from
+ // entering the index. See LUCENE-1282 for
+ // details.
+ throw new RuntimeException("mergeFields produced an invalid result: docCount is " + docCount + " but fdx file size is " + fdxFileLength + " file=" + fileName + " file exists?=" + directory.fileExists(fileName) + "; now aborting this merge to prevent index corruption");
+
+ segmentWriteState = new SegmentWriteState(null, directory, segment, fieldInfos, docCount, termIndexInterval);
return docCount;
}
@@ -521,9 +489,7 @@ final class SegmentMerger {
private final void mergeTerms() throws CorruptIndexException, IOException {
- SegmentWriteState state = new SegmentWriteState(null, directory, segment, null, mergedDocs, 0, termIndexInterval);
-
- final FormatPostingsFieldsConsumer fieldsConsumer = new FormatPostingsFieldsWriter(state, fieldInfos);
+ final FormatPostingsFieldsConsumer fieldsConsumer = new FormatPostingsFieldsWriter(segmentWriteState, fieldInfos);
try {
queue = new SegmentMergeQueue(readers.size());
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java Wed Jan 5 17:33:58 2011
@@ -19,33 +19,49 @@ package org.apache.lucene.index;
import java.util.HashSet;
import java.util.Collection;
+import java.io.PrintStream;
import org.apache.lucene.store.Directory;
-class SegmentWriteState {
- DocumentsWriter docWriter;
- Directory directory;
- String segmentName;
- String docStoreSegmentName;
- int numDocs;
- int termIndexInterval;
- int numDocsInStore;
- Collection<String> flushedFiles;
+/**
+ * @lucene.experimental
+ */
+public class SegmentWriteState {
+ public final PrintStream infoStream;
+ public final Directory directory;
+ public final String segmentName;
+ public final FieldInfos fieldInfos;
+ public final int numDocs;
public boolean hasVectors;
+ public final Collection<String> flushedFiles;
- public SegmentWriteState(DocumentsWriter docWriter, Directory directory, String segmentName, String docStoreSegmentName, int numDocs,
- int numDocsInStore, int termIndexInterval) {
- this.docWriter = docWriter;
+ /** Expert: The fraction of terms in the "dictionary" which should be stored
+ * in RAM. Smaller values use more memory, but make searching slightly
+ * faster, while larger values use less memory and make searching slightly
+ * slower. Searching is typically not dominated by dictionary lookup, so
+ * tweaking this is rarely useful.*/
+ public final int termIndexInterval;
+
+ /** Expert: The fraction of TermDocs entries stored in skip tables,
+ * used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
+ * smaller indexes, greater acceleration, but fewer accelerable cases, while
+ * smaller values result in bigger indexes, less acceleration and more
+ * accelerable cases. More detailed experiments would be useful here. */
+ public final int skipInterval = 16;
+
+ /** Expert: The maximum number of skip levels. Smaller values result in
+ * slightly smaller indexes, but slower skipping in big posting lists.
+ */
+ public final int maxSkipLevels = 10;
+
+ public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
+ int numDocs, int termIndexInterval) {
+ this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
- this.docStoreSegmentName = docStoreSegmentName;
+ this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
- this.numDocsInStore = numDocsInStore;
this.termIndexInterval = termIndexInterval;
flushedFiles = new HashSet<String>();
}
-
- public String segmentFileName(String ext) {
- return segmentName + "." + ext;
- }
}
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java Wed Jan 5 17:33:58 2011
@@ -29,7 +29,6 @@ final class StoredFieldsWriter {
final DocumentsWriter docWriter;
final FieldInfos fieldInfos;
int lastDocID;
- private String docStoreSegment;
PerDoc[] docFreeList = new PerDoc[1];
int freeCount;
@@ -44,60 +43,31 @@ final class StoredFieldsWriter {
}
synchronized public void flush(SegmentWriteState state) throws IOException {
-
- if (state.numDocsInStore > 0) {
- // It's possible that all documents seen in this segment
- // hit non-aborting exceptions, in which case we will
- // not have yet init'd the FieldsWriter:
- initFieldsWriter();
-
- // Fill fdx file to include any final docs that we
- // skipped because they hit non-aborting exceptions
- fill(state.numDocsInStore - docWriter.getDocStoreOffset());
- }
-
- if (fieldsWriter != null)
- fieldsWriter.flush();
- }
-
- private synchronized void initFieldsWriter() throws IOException {
- if (fieldsWriter == null) {
- docStoreSegment = docWriter.getDocStoreSegment();
- if (docStoreSegment != null) {
- fieldsWriter = new FieldsWriter(docWriter.directory,
- docStoreSegment,
- fieldInfos);
- docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.FIELDS_EXTENSION));
- docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.FIELDS_INDEX_EXTENSION));
- lastDocID = 0;
- }
- }
- }
-
- synchronized public void closeDocStore(SegmentWriteState state) throws IOException {
- final int inc = state.numDocsInStore - lastDocID;
- if (inc > 0) {
+ if (state.numDocs > lastDocID) {
initFieldsWriter();
- fill(state.numDocsInStore - docWriter.getDocStoreOffset());
+ fill(state.numDocs);
}
if (fieldsWriter != null) {
fieldsWriter.close();
fieldsWriter = null;
- assert docStoreSegment != null;
- assert state.docStoreSegmentName != null;
- assert docStoreSegment.equals(state.docStoreSegmentName): "fieldsWriter wrote to segment=" + docStoreSegment + " vs SegmentWriteState segment=" + state.docStoreSegmentName;
lastDocID = 0;
- String fieldsName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.FIELDS_EXTENSION);
- String fieldsIdxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.FIELDS_INDEX_EXTENSION);
+
+ String fieldsName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.FIELDS_EXTENSION);
+ String fieldsIdxName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.FIELDS_INDEX_EXTENSION);
state.flushedFiles.add(fieldsName);
state.flushedFiles.add(fieldsIdxName);
- state.docWriter.removeOpenFile(fieldsName);
- state.docWriter.removeOpenFile(fieldsIdxName);
+ if (4 + ((long) state.numDocs) * 8 != state.directory.fileLength(fieldsIdxName)) {
+ throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(fieldsIdxName) + " length in bytes of " + fieldsIdxName + " file exists?=" + state.directory.fileExists(fieldsIdxName));
+ }
+ }
+ }
- if (4+((long) state.numDocsInStore)*8 != state.directory.fileLength(fieldsIdxName))
- throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(fieldsIdxName) + " length in bytes of " + fieldsIdxName + " file exists?=" + state.directory.fileExists(fieldsIdxName));
+ private synchronized void initFieldsWriter() throws IOException {
+ if (fieldsWriter == null) {
+ fieldsWriter = new FieldsWriter(docWriter.directory, docWriter.getSegment(), fieldInfos);
+ lastDocID = 0;
}
}
@@ -114,16 +84,14 @@ final class StoredFieldsWriter {
docFreeList = new PerDoc[ArrayUtil.oversize(allocCount, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
}
return new PerDoc();
- } else
+ } else {
return docFreeList[--freeCount];
+ }
}
synchronized void abort() {
if (fieldsWriter != null) {
- try {
- fieldsWriter.close();
- } catch (Throwable t) {
- }
+ fieldsWriter.abort();
fieldsWriter = null;
lastDocID = 0;
}
@@ -131,12 +99,9 @@ final class StoredFieldsWriter {
/** Fills in any hole in the docIDs */
void fill(int docID) throws IOException {
- final int docStoreOffset = docWriter.getDocStoreOffset();
-
// We must "catch up" for all docs before us
// that had no stored fields:
- final int end = docID+docStoreOffset;
- while(lastDocID < end) {
+ while(lastDocID < docID) {
fieldsWriter.skipDocument();
lastDocID++;
}
@@ -156,10 +121,6 @@ final class StoredFieldsWriter {
assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument end");
}
- public boolean freeRAM() {
- return false;
- }
-
synchronized void free(PerDoc perDoc) {
assert freeCount < docFreeList.length;
assert 0 == perDoc.numStoredFields;
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java Wed Jan 5 17:33:58 2011
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import java.io.IOException;
@@ -30,7 +31,6 @@ import java.util.Map;
final class TermVectorsTermsWriter extends TermsHashConsumer {
final DocumentsWriter docWriter;
- TermVectorsWriter termVectorsWriter;
PerDoc[] docFreeList = new PerDoc[1];
int freeCount;
IndexOutput tvx;
@@ -50,26 +50,29 @@ final class TermVectorsTermsWriter exten
@Override
synchronized void flush(Map<TermsHashConsumerPerThread,Collection<TermsHashConsumerPerField>> threadsAndFields, final SegmentWriteState state) throws IOException {
+ if (tvx != null) {
+ // At least one doc in this run had term vectors enabled
+ fill(state.numDocs);
+ tvx.close();
+ tvf.close();
+ tvd.close();
+ tvx = tvd = tvf = null;
+ assert state.segmentName != null;
+ String idxName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.VECTORS_INDEX_EXTENSION);
+ String fldName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.VECTORS_FIELDS_EXTENSION);
+ String docName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
- // NOTE: it's possible that all documents seen in this segment
- // hit non-aborting exceptions, in which case we will
- // not have yet init'd the TermVectorsWriter. This is
- // actually OK (unlike in the stored fields case)
- // because, although FieldInfos.hasVectors() will return
- // true, the TermVectorsReader gracefully handles
- // non-existence of the term vectors files.
- state.hasVectors = hasVectors;
+ if (4 + ((long) state.numDocs) * 16 != state.directory.fileLength(idxName)) {
+ throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
+ }
- if (tvx != null) {
+ state.flushedFiles.add(idxName);
+ state.flushedFiles.add(fldName);
+ state.flushedFiles.add(docName);
- if (state.numDocsInStore > 0)
- // In case there are some final documents that we
- // didn't see (because they hit a non-aborting exception):
- fill(state.numDocsInStore - docWriter.getDocStoreOffset());
-
- tvx.flush();
- tvd.flush();
- tvf.flush();
+ lastDocID = 0;
+ state.hasVectors = hasVectors;
+ hasVectors = false;
}
for (Map.Entry<TermsHashConsumerPerThread,Collection<TermsHashConsumerPerField>> entry : threadsAndFields.entrySet()) {
@@ -84,37 +87,6 @@ final class TermVectorsTermsWriter exten
}
}
- @Override
- synchronized void closeDocStore(final SegmentWriteState state) throws IOException {
- if (tvx != null) {
- // At least one doc in this run had term vectors
- // enabled
- fill(state.numDocsInStore - docWriter.getDocStoreOffset());
- tvx.close();
- tvf.close();
- tvd.close();
- tvx = null;
- assert state.docStoreSegmentName != null;
- String idxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_INDEX_EXTENSION);
- if (4+((long) state.numDocsInStore)*16 != state.directory.fileLength(idxName))
- throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
-
- String fldName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_FIELDS_EXTENSION);
- String docName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
- state.flushedFiles.add(idxName);
- state.flushedFiles.add(fldName);
- state.flushedFiles.add(docName);
-
- docWriter.removeOpenFile(idxName);
- docWriter.removeOpenFile(fldName);
- docWriter.removeOpenFile(docName);
-
- lastDocID = 0;
- state.hasVectors = hasVectors;
- hasVectors = false;
- }
- }
-
int allocCount;
synchronized PerDoc getPerDoc() {
@@ -128,18 +100,17 @@ final class TermVectorsTermsWriter exten
docFreeList = new PerDoc[ArrayUtil.oversize(allocCount, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
}
return new PerDoc();
- } else
+ } else {
return docFreeList[--freeCount];
+ }
}
/** Fills in no-term-vectors for all docs we haven't seen
* since the last doc that had term vectors. */
void fill(int docID) throws IOException {
- final int docStoreOffset = docWriter.getDocStoreOffset();
- final int end = docID+docStoreOffset;
- if (lastDocID < end) {
+ if (lastDocID < docID) {
final long tvfPosition = tvf.getFilePointer();
- while(lastDocID < end) {
+ while(lastDocID < docID) {
tvx.writeLong(tvd.getFilePointer());
tvd.writeVInt(0);
tvx.writeLong(tvfPosition);
@@ -151,31 +122,19 @@ final class TermVectorsTermsWriter exten
synchronized void initTermVectorsWriter() throws IOException {
if (tvx == null) {
- final String docStoreSegment = docWriter.getDocStoreSegment();
-
- if (docStoreSegment == null)
- return;
-
// If we hit an exception while init'ing the term
// vector output files, we must abort this segment
// because those files will be in an unknown
// state:
- String idxName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_INDEX_EXTENSION);
- String docName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
- String fldName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_FIELDS_EXTENSION);
hasVectors = true;
- tvx = docWriter.directory.createOutput(idxName);
- tvd = docWriter.directory.createOutput(docName);
- tvf = docWriter.directory.createOutput(fldName);
+ tvx = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), IndexFileNames.VECTORS_INDEX_EXTENSION));
+ tvd = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+ tvf = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), IndexFileNames.VECTORS_FIELDS_EXTENSION));
tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
- docWriter.addOpenFile(idxName);
- docWriter.addOpenFile(fldName);
- docWriter.addOpenFile(docName);
-
lastDocID = 0;
}
}
@@ -193,8 +152,9 @@ final class TermVectorsTermsWriter exten
tvx.writeLong(tvf.getFilePointer());
tvd.writeVInt(perDoc.numVectorFields);
if (perDoc.numVectorFields > 0) {
- for(int i=0;i<perDoc.numVectorFields;i++)
+ for(int i=0;i<perDoc.numVectorFields;i++) {
tvd.writeVInt(perDoc.fieldNumbers[i]);
+ }
assert 0 == perDoc.fieldPointers[0];
long lastPos = perDoc.fieldPointers[0];
for(int i=1;i<perDoc.numVectorFields;i++) {
@@ -206,7 +166,7 @@ final class TermVectorsTermsWriter exten
perDoc.numVectorFields = 0;
}
- assert lastDocID == perDoc.docID + docWriter.getDocStoreOffset();
+ assert lastDocID == perDoc.docID;
lastDocID++;
@@ -215,36 +175,26 @@ final class TermVectorsTermsWriter exten
assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument end");
}
- public boolean freeRAM() {
- // We don't hold any state beyond one doc, so we don't
- // free persistent RAM here
- return false;
- }
-
@Override
public void abort() {
hasVectors = false;
- if (tvx != null) {
- try {
- tvx.close();
- } catch (Throwable t) {
- }
- tvx = null;
- }
- if (tvd != null) {
- try {
- tvd.close();
- } catch (Throwable t) {
- }
- tvd = null;
- }
- if (tvf != null) {
- try {
- tvf.close();
- } catch (Throwable t) {
- }
- tvf = null;
+ try {
+ IOUtils.closeSafely(tvx, tvd, tvf);
+ } catch (IOException ignored) {
+ }
+ try {
+ docWriter.directory.deleteFile(IndexFileNames.segmentFileName(docWriter.getSegment(), IndexFileNames.VECTORS_INDEX_EXTENSION));
+ } catch (IOException ignored) {
+ }
+ try {
+ docWriter.directory.deleteFile(IndexFileNames.segmentFileName(docWriter.getSegment(), IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+ } catch (IOException ignored) {
+ }
+ try {
+ docWriter.directory.deleteFile(IndexFileNames.segmentFileName(docWriter.getSegment(), IndexFileNames.VECTORS_FIELDS_EXTENSION));
+ } catch (IOException ignored) {
}
+ tvx = tvd = tvf = null;
lastDocID = 0;
}
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHash.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHash.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHash.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHash.java Wed Jan 5 17:33:58 2011
@@ -70,13 +70,6 @@ final class TermsHash extends InvertedDo
}
@Override
- synchronized void closeDocStore(SegmentWriteState state) throws IOException {
- consumer.closeDocStore(state);
- if (nextTermsHash != null)
- nextTermsHash.closeDocStore(state);
- }
-
- @Override
synchronized void flush(Map<InvertedDocConsumerPerThread,Collection<InvertedDocConsumerPerField>> threadsAndFields, final SegmentWriteState state) throws IOException {
Map<TermsHashConsumerPerThread,Collection<TermsHashConsumerPerField>> childThreadsAndFields = new HashMap<TermsHashConsumerPerThread,Collection<TermsHashConsumerPerField>>();
Map<InvertedDocConsumerPerThread,Collection<InvertedDocConsumerPerField>> nextThreadsAndFields;
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHashConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHashConsumer.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHashConsumer.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TermsHashConsumer.java Wed Jan 5 17:33:58 2011
@@ -25,7 +25,6 @@ abstract class TermsHashConsumer {
abstract TermsHashConsumerPerThread addThread(TermsHashPerThread perThread);
abstract void flush(Map<TermsHashConsumerPerThread,Collection<TermsHashConsumerPerField>> threadsAndFields, final SegmentWriteState state) throws IOException;
abstract void abort();
- abstract void closeDocStore(SegmentWriteState state) throws IOException;
FieldInfos fieldInfos;
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/util/IOUtils.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/util/IOUtils.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/util/IOUtils.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/util/IOUtils.java Wed Jan 5 17:33:58 2011
@@ -64,4 +64,27 @@ public final class IOUtils {
else if (firstIOE != null)
throw firstIOE;
}
+
+ /**
+   * <p>Closes all given <tt>Closeable</tt>s, suppressing all but the first thrown exception. Some of the
+   * <tt>Closeable</tt>s may be null; they are ignored. After everything is closed, the method either throws
+   * the first of the suppressed exceptions, or completes normally.</p>
+ * @param objects objects to call <tt>close()</tt> on
+ */
+ public static void closeSafely(Closeable... objects) throws IOException {
+ IOException firstIOE = null;
+
+ for (Closeable object : objects) {
+ try {
+ if (object != null)
+ object.close();
+ } catch (IOException ioe) {
+ if (firstIOE == null)
+ firstIOE = ioe;
+ }
+ }
+
+ if (firstIOE != null)
+ throw firstIOE;
+ }
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearch.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearch.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearch.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearch.java Wed Jan 5 17:33:58 2011
@@ -77,7 +77,6 @@ public class TestSearch extends LuceneTe
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
IndexWriter writer = new IndexWriter(directory, conf);
String[] docs = {
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java Wed Jan 5 17:33:58 2011
@@ -82,7 +82,6 @@ public class TestSearchForDuplicates ext
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFiles);
- lmp.setUseCompoundDocStore(useCompoundFiles);
IndexWriter writer = new IndexWriter(directory, conf);
if (VERBOSE) {
System.out.println("TEST: now build index");
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/Test2BTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/Test2BTerms.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/Test2BTerms.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/Test2BTerms.java Wed Jan 5 17:33:58 2011
@@ -83,12 +83,11 @@ public class Test2BTerms extends LuceneT
Directory dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
IndexWriter w = new IndexWriter(dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
- .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .setRAMBufferSizeMB(256.0).setMergeScheduler(new ConcurrentMergeScheduler()));
- ((LogMergePolicy) w.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) w.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
- ((LogMergePolicy) w.getConfig().getMergePolicy()).setMergeFactor(10);
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .setRAMBufferSizeMB(256.0)
+ .setMergeScheduler(new ConcurrentMergeScheduler())
+ .setMergePolicy(newLogMergePolicy(false, 10)));
MergePolicy mp = w.getConfig().getMergePolicy();
if (mp instanceof LogByteSizeMergePolicy) {
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java Wed Jan 5 17:33:58 2011
@@ -59,7 +59,6 @@ public class TestAddIndexes extends Luce
writer = newWriter(aux, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 40 documents in separate files
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
@@ -271,13 +270,11 @@ public class TestAddIndexes extends Luce
writer = newWriter(aux, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 140 documents in separate files
addDocs(writer, 40);
writer.close();
writer = newWriter(aux, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
addDocs(writer, 100);
writer.close();
@@ -504,7 +501,6 @@ public class TestAddIndexes extends Luce
writer = newWriter(aux, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
// add 30 documents in 3 segments
for (int i = 0; i < 3; i++) {
@@ -512,7 +508,6 @@ public class TestAddIndexes extends Luce
writer.close();
writer = newWriter(aux, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
}
assertEquals(30, writer.maxDoc());
@@ -526,7 +521,6 @@ public class TestAddIndexes extends Luce
Directory dir = newDirectory();
LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
@@ -555,7 +549,6 @@ public class TestAddIndexes extends Luce
lmp = new LogByteSizeMergePolicy();
lmp.setMinMergeMB(0.0001);
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(4);
writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Wed Jan 5 17:33:58 2011
@@ -527,7 +527,6 @@ public class TestBackwardsCompatibility
Directory dir = newFSDirectory(new File(dirName));
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
- ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(doCFS);
if (doCFS) {
((LogMergePolicy) conf.getMergePolicy()).setNoCFSRatio(1.0);
}
@@ -542,7 +541,6 @@ public class TestBackwardsCompatibility
// open fresh writer so we get no prx file in the added segment
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
- ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(doCFS);
writer = new IndexWriter(dir, conf);
addNoProxDoc(writer);
writer.close();
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Wed Jan 5 17:33:58 2011
@@ -96,7 +96,7 @@ public class TestConcurrentMergeSchedule
writer.addDocument(doc);
failure.setDoFail();
try {
- writer.flush(true, false, true);
+ writer.flush(true, true);
if (failure.hitExc) {
fail("failed to hit IOException");
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java Wed Jan 5 17:33:58 2011
@@ -195,16 +195,13 @@ public class TestDeletionPolicy extends
final double SECONDS = 2.0;
- boolean useCompoundFile = true;
-
Directory dir = newDirectory();
ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
.setIndexDeletionPolicy(policy);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
- lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
+ lmp.setUseCompoundFile(true);
IndexWriter writer = new IndexWriter(dir, conf);
writer.close();
@@ -219,8 +216,7 @@ public class TestDeletionPolicy extends
new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
- lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
+ lmp.setUseCompoundFile(true);
writer = new IndexWriter(dir, conf);
for(int j=0;j<17;j++) {
addDoc(writer);
@@ -298,7 +294,6 @@ public class TestDeletionPolicy extends
.setMergeScheduler(new SerialMergeScheduler());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
lmp.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, conf);
for(int i=0;i<107;i++) {
@@ -311,7 +306,6 @@ public class TestDeletionPolicy extends
OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
writer = new IndexWriter(dir, conf);
writer.optimize();
writer.close();
@@ -485,7 +479,6 @@ public class TestDeletionPolicy extends
.setMaxBufferedDocs(10);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, conf);
for(int i=0;i<107;i++) {
addDoc(writer);
@@ -496,7 +489,6 @@ public class TestDeletionPolicy extends
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
writer = new IndexWriter(dir, conf);
writer.optimize();
writer.close();
@@ -536,7 +528,6 @@ public class TestDeletionPolicy extends
.setMaxBufferedDocs(10);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, conf);
for(int i=0;i<17;i++) {
addDoc(writer);
@@ -594,7 +585,6 @@ public class TestDeletionPolicy extends
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, conf);
writer.close();
Term searchTerm = new Term("content", "aaa");
@@ -606,7 +596,6 @@ public class TestDeletionPolicy extends
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
writer = new IndexWriter(dir, conf);
for(int j=0;j<17;j++) {
addDoc(writer);
@@ -627,7 +616,6 @@ public class TestDeletionPolicy extends
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
writer = new IndexWriter(dir, conf);
writer.optimize();
// this is a commit
@@ -703,7 +691,6 @@ public class TestDeletionPolicy extends
.setMaxBufferedDocs(10);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, conf);
writer.close();
Term searchTerm = new Term("content", "aaa");
@@ -717,7 +704,6 @@ public class TestDeletionPolicy extends
.setMaxBufferedDocs(10);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
- lmp.setUseCompoundDocStore(useCompoundFile);
writer = new IndexWriter(dir, conf);
for(int j=0;j<17;j++) {
addDoc(writer);
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDoc.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestDoc.java Wed Jan 5 17:33:58 2011
@@ -192,7 +192,7 @@ public class TestDoc extends LuceneTestC
r2.close();
final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
- useCompoundFile, true, -1, null, false, merger.fieldInfos().hasProx(),
+ useCompoundFile, true, merger.fieldInfos().hasProx(),
merger.fieldInfos().hasVectors());
if (useCompoundFile) {
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java Wed Jan 5 17:33:58 2011
@@ -56,7 +56,6 @@ public class TestFieldsReader extends Lu
dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(false);
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(testDoc);
writer.close();
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Wed Jan 5 17:33:58 2011
@@ -43,16 +43,16 @@ public class TestIndexFileDeleter extend
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
.setMaxBufferedDocs(10);
- ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(10);
- ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(true);
- ((LogMergePolicy) conf.getMergePolicy()).setNoCFSRatio(1.0);
+ LogMergePolicy mergePolicy = newLogMergePolicy(true, 10);
+ mergePolicy.setNoCFSRatio(1); // This test expects all of its segments to be in CFS
+ conf.setMergePolicy(mergePolicy);
+
IndexWriter writer = new IndexWriter(dir, conf);
int i;
for(i=0;i<35;i++) {
addDoc(writer, i);
}
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
+ mergePolicy.setUseCompoundFile(false);
for(;i<45;i++) {
addDoc(writer, i);
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReader.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReader.java Wed Jan 5 17:33:58 2011
@@ -21,7 +21,6 @@ package org.apache.lucene.index;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
@@ -567,7 +566,6 @@ public class TestIndexReader extends Luc
// add 1 documents with term : aaa
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
addDoc(writer, searchTerm.text());
writer.close();
@@ -965,35 +963,8 @@ public class TestIndexReader extends Luc
}
}
- // Whether we succeeded or failed, check that all
- // un-referenced files were in fact deleted (ie,
- // we did not create garbage). Just create a
- // new IndexFileDeleter, have it delete
- // unreferenced files, then verify that in fact
- // no files were deleted:
- String[] startFiles = dir.listAll();
- SegmentInfos infos = new SegmentInfos();
- infos.read(dir);
- new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
- String[] endFiles = dir.listAll();
-
- Arrays.sort(startFiles);
- Arrays.sort(endFiles);
-
- //for(int i=0;i<startFiles.length;i++) {
- // System.out.println(" startFiles: " + i + ": " + startFiles[i]);
- //}
-
- if (!Arrays.equals(startFiles, endFiles)) {
- String successStr;
- if (success) {
- successStr = "success";
- } else {
- successStr = "IOException";
- err.printStackTrace();
- }
- fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n " + arrayToString(startFiles) + "\n after delete:\n " + arrayToString(endFiles));
- }
+ IndexWriter.unlock(dir);
+ TestIndexWriter.assertNoUnreferencedFiles(dir, "reader.close() failed to delete unreferenced files");
// Finally, verify index is not corrupt, and, if
// we succeeded, we see all docs changed, and if
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java Wed Jan 5 17:33:58 2011
@@ -487,7 +487,6 @@ public class TestIndexReaderClone extend
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
((LogMergePolicy) w.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) w.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
Document doc = new Document();
doc.add(newField("field", "yes it's stored", Field.Store.YES, Field.Index.ANALYZED));
w.addDocument(doc);
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Wed Jan 5 17:33:58 2011
@@ -233,7 +233,6 @@ public class TestIndexReaderCloneNorms e
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
- lmp.setUseCompoundDocStore(true);
iw.close();
}
@@ -286,7 +285,6 @@ public class TestIndexReaderCloneNorms e
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
- lmp.setUseCompoundDocStore(compound);
IndexWriter iw = new IndexWriter(dir, conf);
for (int i = 0; i < ndocs; i++) {
iw.addDocument(newDoc());
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Wed Jan 5 17:33:58 2011
@@ -154,7 +154,7 @@ public class TestIndexWriter extends Luc
String[] startFiles = dir.listAll();
SegmentInfos infos = new SegmentInfos();
infos.read(dir);
- new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
+ new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED).rollback();
String[] endFiles = dir.listAll();
Arrays.sort(startFiles);
@@ -1022,7 +1022,7 @@ public class TestIndexWriter extends Luc
doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<19;i++)
writer.addDocument(doc);
- writer.flush(false, true, true);
+ writer.flush(false, true);
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
@@ -1191,7 +1191,6 @@ public class TestIndexWriter extends Luc
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
@@ -1227,7 +1226,6 @@ public class TestIndexWriter extends Luc
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp2.setUseCompoundFile(false);
- lmp2.setUseCompoundDocStore(false);
writer.optimize();
writer.close();
}
@@ -2401,7 +2399,6 @@ public class TestIndexWriter extends Luc
TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
writer.setInfoStream(new PrintStream(bos));
writer.addDocument(new Document());
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Wed Jan 5 17:33:58 2011
@@ -685,7 +685,6 @@ public class TestIndexWriterDelete exten
LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();
lmp.setUseCompoundFile(true);
- lmp.setUseCompoundDocStore(true);
dir.failOn(failure.reset());
@@ -832,7 +831,7 @@ public class TestIndexWriterDelete exten
String[] startFiles = dir.listAll();
SegmentInfos infos = new SegmentInfos();
infos.read(dir);
- new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
+ new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null);
String[] endFiles = dir.listAll();
modifier.close();
dir.close();
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java Wed Jan 5 17:33:58 2011
@@ -232,7 +232,7 @@ public class TestIndexWriterReader exten
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
- writer.flush(false, true, true);
+ writer.flush(false, true);
// create a 2nd index
Directory dir2 = newDirectory();
@@ -312,7 +312,7 @@ public class TestIndexWriterReader exten
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
- writer.flush(false, true, true);
+ writer.flush(false, true);
// get a reader
IndexReader r1 = writer.getReader();
@@ -519,7 +519,7 @@ public class TestIndexWriterReader exten
IndexReader r1 = writer.getReader();
assertEquals(0, r1.maxDoc());
createIndexNoClose(false, "index1", writer);
- writer.flush(!optimize, true, true);
+ writer.flush(!optimize, true);
IndexReader iwr1 = writer.getReader();
assertEquals(100, iwr1.maxDoc());
@@ -531,7 +531,7 @@ public class TestIndexWriterReader exten
Document d = createDocument(x, "index1", 5);
writer.addDocument(d);
}
- writer.flush(false, true, true);
+ writer.flush(false, true);
// verify the reader was reopened internally
IndexReader iwr2 = writer.getReader();
assertTrue(iwr2 != r1);
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java Wed Jan 5 17:33:58 2011
@@ -345,47 +345,6 @@ public class TestIndexWriterWithThreads
_testMultipleThreadsFailure(new FailOnlyOnAbortOrFlush(true));
}
- // Throws IOException during DocumentsWriter.closeDocStore
- private static class FailOnlyInCloseDocStore extends MockDirectoryWrapper.Failure {
- private boolean onlyOnce;
- public FailOnlyInCloseDocStore(boolean onlyOnce) {
- this.onlyOnce = onlyOnce;
- }
- @Override
- public void eval(MockDirectoryWrapper dir) throws IOException {
- if (doFail) {
- StackTraceElement[] trace = new Exception().getStackTrace();
- for (int i = 0; i < trace.length; i++) {
- if ("closeDocStore".equals(trace[i].getMethodName())) {
- if (onlyOnce)
- doFail = false;
- throw new IOException("now failing on purpose");
- }
- }
- }
- }
- }
-
- // LUCENE-1130: test IOException in closeDocStore
- public void testIOExceptionDuringCloseDocStore() throws IOException {
- _testSingleThreadFailure(new FailOnlyInCloseDocStore(false));
- }
-
- // LUCENE-1130: test IOException in closeDocStore
- public void testIOExceptionDuringCloseDocStoreOnlyOnce() throws IOException {
- _testSingleThreadFailure(new FailOnlyInCloseDocStore(true));
- }
-
- // LUCENE-1130: test IOException in closeDocStore, with threads
- public void testIOExceptionDuringCloseDocStoreWithThreads() throws Exception {
- _testMultipleThreadsFailure(new FailOnlyInCloseDocStore(false));
- }
-
- // LUCENE-1130: test IOException in closeDocStore, with threads
- public void testIOExceptionDuringCloseDocStoreWithThreadsOnlyOnce() throws Exception {
- _testMultipleThreadsFailure(new FailOnlyInCloseDocStore(true));
- }
-
// Throws IOException during DocumentsWriter.writeSegment
private static class FailOnlyInWriteSegment extends MockDirectoryWrapper.Failure {
private boolean onlyOnce;
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyBug.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyBug.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyBug.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyBug.java Wed Jan 5 17:33:58 2011
@@ -72,8 +72,7 @@ public class TestLazyBug extends LuceneT
TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
-
+
for (int d = 1; d <= NUM_DOCS; d++) {
Document doc = new Document();
for (int f = 1; f <= NUM_FIELDS; f++ ) {
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java Wed Jan 5 17:33:58 2011
@@ -68,7 +68,6 @@ public class TestLazyProxSkipping extend
Directory directory = new SeekCountingDirectory(new RAMDirectory());
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
String content;
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java Wed Jan 5 17:33:58 2011
@@ -33,7 +33,6 @@ public class TestNRTReaderWithThreads ex
IndexWriter writer = new IndexWriter(mainDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
IndexReader reader = writer.getReader(); // start pooling readers
reader.close();
RunThread[] indexThreads = new RunThread[4];
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNoMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNoMergePolicy.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNoMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNoMergePolicy.java Wed Jan 5 17:33:58 2011
@@ -33,16 +33,13 @@ public class TestNoMergePolicy extends L
assertNull(mp.findMerges(null));
assertNull(mp.findMergesForOptimize(null, 0, null));
assertNull(mp.findMergesToExpungeDeletes(null));
- assertFalse(mp.useCompoundDocStore(null));
assertFalse(mp.useCompoundFile(null, null));
mp.close();
}
@Test
public void testCompoundFiles() throws Exception {
- assertFalse(NoMergePolicy.NO_COMPOUND_FILES.useCompoundDocStore(null));
assertFalse(NoMergePolicy.NO_COMPOUND_FILES.useCompoundFile(null, null));
- assertTrue(NoMergePolicy.COMPOUND_FILES.useCompoundDocStore(null));
assertTrue(NoMergePolicy.COMPOUND_FILES.useCompoundFile(null, null));
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNorms.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNorms.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestNorms.java Wed Jan 5 17:33:58 2011
@@ -147,7 +147,6 @@ public class TestNorms extends LuceneTes
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
- lmp.setUseCompoundDocStore(true);
iw.close();
}
@@ -192,7 +191,6 @@ public class TestNorms extends LuceneTes
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
- lmp.setUseCompoundDocStore(compound);
for (int i = 0; i < ndocs; i++) {
iw.addDocument(newDoc());
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestOmitTf.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestOmitTf.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestOmitTf.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestOmitTf.java Wed Jan 5 17:33:58 2011
@@ -218,7 +218,6 @@ public class TestOmitTf extends LuceneTe
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
Document d = new Document();
Field f1 = newField("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java Wed Jan 5 17:33:58 2011
@@ -69,7 +69,7 @@ public class TestPerSegmentDeletes exten
// flushing without applying deletes means
// there will still be deletes in the segment infos
- writer.flush(false, false, false);
+ writer.flush(false, false);
assertTrue(writer.bufferedDeletes.any());
// get reader flushes pending deletes
@@ -82,7 +82,7 @@ public class TestPerSegmentDeletes exten
// merge segments 0 and 1
// which should apply the delete id:2
writer.deleteDocuments(new Term("id", "2"));
- writer.flush(false, false, false);
+ writer.flush(false, false);
fsmp.doMerge = true;
fsmp.start = 0;
fsmp.length = 2;
@@ -175,12 +175,12 @@ public class TestPerSegmentDeletes exten
writer.addDocument(TestIndexWriterReader.createDocument(x, "5", 2));
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
}
- writer.flush(false, true, false);
+ writer.flush(false, false);
for (int x = 25; x < 30; x++) {
writer.addDocument(TestIndexWriterReader.createDocument(x, "5", 2));
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
}
- writer.flush(false, true, false);
+ writer.flush(false, false);
//System.out.println("infos3:"+writer.segmentInfos);
@@ -273,11 +273,6 @@ public class TestPerSegmentDeletes exten
}
@Override
- public boolean useCompoundDocStore(SegmentInfos segments) {
- return useCompoundFile;
- }
-
- @Override
public boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) {
return useCompoundFile;
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java Wed Jan 5 17:33:58 2011
@@ -78,8 +78,8 @@ public class TestSegmentMerger extends L
assertTrue(docsMerged == 2);
//Should be able to open a new SegmentReader against the new directory
SegmentReader mergedReader = SegmentReader.get(false, mergedDir,
- new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true, -1,
- null, false, merger.fieldInfos().hasProx(),
+ new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true,
+ merger.fieldInfos().hasProx(),
merger.fieldInfos().hasVectors()),
BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java Wed Jan 5 17:33:58 2011
@@ -155,7 +155,6 @@ public class TestStressIndexing2 extends
w.commit();
LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(mergeFactor);
/***
w.setMaxMergeDocs(Integer.MAX_VALUE);
@@ -212,7 +211,6 @@ public class TestStressIndexing2 extends
w.setInfoStream(VERBOSE ? System.out : null);
LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
- lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(mergeFactor);
threads = new IndexingThread[nThreads];
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java Wed Jan 5 17:33:58 2011
@@ -91,7 +91,6 @@ public class TestTermVectorsReader exten
dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MyAnalyzer()).setMaxBufferedDocs(-1));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
Document doc = new Document();
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java?rev=1055547&r1=1055546&r2=1055547&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java Wed Jan 5 17:33:58 2011
@@ -46,7 +46,6 @@ public class TestFileSwitchDirectory ext
FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
IndexWriter writer = new IndexWriter(fsd, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
TestIndexWriterReader.createIndexNoClose(true, "ram", writer);
IndexReader reader = writer.getReader();
assertEquals(100, reader.maxDoc());