You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by rm...@apache.org on 2011/11/06 20:09:11 UTC
svn commit: r1198537 [1/3] - in /lucene/dev/branches/lucene2621: lucene/
lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/
lucene/contrib/misc/src/test/org/apache/lucene/misc/
lucene/contrib/misc/src/test/org/apache/lucene/stor...
Author: rmuir
Date: Sun Nov 6 19:09:08 2011
New Revision: 1198537
URL: http://svn.apache.org/viewvc?rev=1198537&view=rev
Log:
LUCENE-2621: pass infostream to codecs when merging, add tests.infostream (default=tests.verbose), randomly use null impl in tests
Added:
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/util/InfoStream.java (with props)
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/util/PrintStreamInfoStream.java (with props)
lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/util/NullInfoStream.java (with props)
Modified:
lucene/dev/branches/lucene2621/lucene/common-build.xml
lucene/dev/branches/lucene2621/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java
lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushPolicy.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexUpgrader.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexWriter.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexWriterConfig.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/PerDocWriteState.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/codecs/MergeState.java
lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/RandomIndexWriter.java
lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/util/LuceneTestCase.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestDemo.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestExternalCodecs.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BTerms.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDoc.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReader.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderDelete.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderOnDiskFull.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterCommit.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterNRTIsCurrent.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterOptimize.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestMultiFields.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestNeverDelete.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestOmitNorms.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestOmitTf.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestOptimizeForever.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestSegmentInfo.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestStressNRT.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestTermsEnum.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestTieredMergePolicy.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/codecs/perfield/TestPerFieldPostingsFormat.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestFieldCache.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestTermVectors.java
lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/util/fst/TestFSTs.java
lucene/dev/branches/lucene2621/modules/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CloseIndexTask.java
lucene/dev/branches/lucene2621/modules/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
lucene/dev/branches/lucene2621/modules/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/RollbackIndexTask.java
lucene/dev/branches/lucene2621/modules/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java
lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
Modified: lucene/dev/branches/lucene2621/lucene/common-build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/common-build.xml?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/common-build.xml (original)
+++ lucene/dev/branches/lucene2621/lucene/common-build.xml Sun Nov 6 19:09:08 2011
@@ -95,6 +95,8 @@
<property name="tests.nightly" value="false" />
<property name="tests.cleanthreads.sysprop" value="perMethod"/>
<property name="tests.asserts.gracious" value="false"/>
+ <property name="tests.verbose" value="false"/>
+ <property name="tests.infostream" value="${tests.verbose}"/>
<property name="javac.deprecation" value="off"/>
<property name="javac.debug" value="on"/>
@@ -491,8 +493,6 @@
<compile-test-macro srcdir="${tests.src.dir}" destdir="${build.dir}/classes/test"
test.classpath="test.classpath"/>
</target>
-
- <property name="tests.verbose" value="false"/>
<macrodef name="compile-test-macro" description="Compiles junit tests.">
<attribute name="srcdir"/>
@@ -550,6 +550,8 @@
<!-- allow tests to control debug prints -->
<sysproperty key="tests.verbose" value="${tests.verbose}"/>
+ <!-- even more debugging -->
+ <sysproperty key="tests.infostream" value="${tests.infostream}"/>
<!-- directory for formatter lock -->
<sysproperty key="tests.lockdir" value="${tests.lockdir}"/>
<!-- set the codec tests should run with -->
Modified: lucene/dev/branches/lucene2621/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (original)
+++ lucene/dev/branches/lucene2621/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java Sun Nov 6 19:09:08 2011
@@ -98,7 +98,6 @@ public class TestIndicesEquals extends L
// create dir data
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))).setMergePolicy(newLogMergePolicy()));
- indexWriter.setInfoStream(VERBOSE ? System.out : null);
if (VERBOSE) {
System.out.println("TEST: make test index");
}
Modified: lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (original)
+++ lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java Sun Nov 6 19:09:08 2011
@@ -42,7 +42,6 @@ public class TestHighFreqTerms extends L
writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2));
- writer.setInfoStream(VERBOSE ? System.out : null);
indexDocs(writer);
reader = IndexReader.open(dir, true);
_TestUtil.checkIndex(dir);
Modified: lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java (original)
+++ lucene/dev/branches/lucene2621/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java Sun Nov 6 19:09:08 2011
@@ -47,7 +47,6 @@ public class TestNRTCachingDirectory ext
NRTCachingDirectory cachedDir = new NRTCachingDirectory(dir, 2.0, 25.0);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
RandomIndexWriter w = new RandomIndexWriter(random, cachedDir, conf);
- w.w.setInfoStream(VERBOSE ? System.out : null);
final LineFileDocs docs = new LineFileDocs(random);
final int numDocs = _TestUtil.nextInt(random, 100, 400);
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java Sun Nov 6 19:09:08 2011
@@ -18,10 +18,8 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
-import java.io.PrintStream;
import java.util.List;
import java.util.ArrayList;
-import java.util.Date;
import java.util.Comparator;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicInteger;
@@ -34,6 +32,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.InfoStream;
/* Tracks the stream of {@link BufferedDeletes}.
* When DocumentsWriterPerThread flushes, its buffered
@@ -63,22 +62,11 @@ class BufferedDeletesStream {
// used only by assert
private Term lastDeleteTerm;
- private PrintStream infoStream;
+ private final InfoStream infoStream;
private final AtomicLong bytesUsed = new AtomicLong();
private final AtomicInteger numTerms = new AtomicInteger();
- private final int messageID;
- public BufferedDeletesStream(int messageID) {
- this.messageID = messageID;
- }
-
- private synchronized void message(String message) {
- if (infoStream != null) {
- infoStream.println("BD " + messageID + " [" + new Date() + "; " + Thread.currentThread().getName() + "]: " + message);
- }
- }
-
- public synchronized void setInfoStream(PrintStream infoStream) {
+ public BufferedDeletesStream(InfoStream infoStream) {
this.infoStream = infoStream;
}
@@ -101,7 +89,7 @@ class BufferedDeletesStream {
numTerms.addAndGet(packet.numTermDeletes);
bytesUsed.addAndGet(packet.bytesUsed);
if (infoStream != null) {
- message("push deletes " + packet + " delGen=" + packet.delGen() + " packetCount=" + deletes.size() + " totBytesUsed=" + bytesUsed.get());
+ infoStream.message("BD", "push deletes " + packet + " delGen=" + packet.delGen() + " packetCount=" + deletes.size() + " totBytesUsed=" + bytesUsed.get());
}
assert checkDeleteStats();
return packet.delGen();
@@ -171,12 +159,14 @@ class BufferedDeletesStream {
assert checkDeleteStats();
if (!any()) {
- message("applyDeletes: no deletes; skipping");
+ if (infoStream != null) {
+ infoStream.message("BD", "applyDeletes: no deletes; skipping");
+ }
return new ApplyDeletesResult(false, nextGen++, null);
}
if (infoStream != null) {
- message("applyDeletes: infos=" + infos + " packetCount=" + deletes.size());
+ infoStream.message("BD", "applyDeletes: infos=" + infos + " packetCount=" + deletes.size());
}
List<SegmentInfo> infos2 = new ArrayList<SegmentInfo>();
@@ -248,7 +238,7 @@ class BufferedDeletesStream {
}
if (infoStream != null) {
- message("seg=" + info + " segGen=" + segGen + " segDeletes=[" + packet + "]; coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
+ infoStream.message("BD", "seg=" + info + " segGen=" + segGen + " segDeletes=[" + packet + "]; coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
}
if (coalescedDeletes == null) {
@@ -290,7 +280,7 @@ class BufferedDeletesStream {
}
if (infoStream != null) {
- message("seg=" + info + " segGen=" + segGen + " coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
+ infoStream.message("BD", "seg=" + info + " segGen=" + segGen + " coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
}
}
info.setBufferedDeletesGen(nextGen);
@@ -301,7 +291,7 @@ class BufferedDeletesStream {
assert checkDeleteStats();
if (infoStream != null) {
- message("applyDeletes took " + (System.currentTimeMillis()-t0) + " msec");
+ infoStream.message("BD", "applyDeletes took " + (System.currentTimeMillis()-t0) + " msec");
}
// assert infos != segmentInfos || !any() : "infos=" + infos + " segmentInfos=" + segmentInfos + " any=" + any;
@@ -324,7 +314,7 @@ class BufferedDeletesStream {
}
if (infoStream != null) {
- message("prune sis=" + segmentInfos + " minGen=" + minGen + " packetCount=" + deletes.size());
+ infoStream.message("BD", "prune sis=" + segmentInfos + " minGen=" + minGen + " packetCount=" + deletes.size());
}
final int limit = deletes.size();
for(int delIDX=0;delIDX<limit;delIDX++) {
@@ -344,7 +334,7 @@ class BufferedDeletesStream {
private synchronized void prune(int count) {
if (count > 0) {
if (infoStream != null) {
- message("pruneDeletes: prune " + count + " packets; " + (deletes.size() - count) + " packets remain");
+ infoStream.message("BD", "pruneDeletes: prune " + count + " packets; " + (deletes.size() - count) + " packets remain");
}
for(int delIDX=0;delIDX<count;delIDX++) {
final FrozenBufferedDeletes packet = deletes.get(delIDX);
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java Sun Nov 6 19:09:08 2011
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.util.CollectionUtil;
@@ -225,7 +226,10 @@ public class ConcurrentMergeScheduler ex
* called and returned true.
*/
protected void message(String message) {
- writer.message("CMS: " + message);
+ final InfoStream infoStream = writer.infoStream;
+ if (infoStream != null) {
+ infoStream.message("CMS", message);
+ }
}
private synchronized void initMergeThreadPriority() {
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java Sun Nov 6 19:09:08 2011
@@ -36,6 +36,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.SimilarityProvider;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.InfoStream;
/**
* This class accepts multiple added documents and directly
@@ -106,7 +107,7 @@ final class DocumentsWriter {
private volatile boolean closed;
- PrintStream infoStream;
+ final InfoStream infoStream;
SimilarityProvider similarityProvider;
List<String> newFiles;
@@ -140,6 +141,7 @@ final class DocumentsWriter {
this.codec = codec;
this.directory = directory;
this.indexWriter = writer;
+ this.infoStream = config.getInfoStream();
this.similarityProvider = config.getSimilarityProvider();
this.perThreadPool = config.getIndexerThreadPool();
this.chain = config.getIndexingChain();
@@ -187,14 +189,6 @@ final class DocumentsWriter {
indexWriter.flushCount.incrementAndGet();
}
- synchronized void setInfoStream(PrintStream infoStream) {
- this.infoStream = infoStream;
- final Iterator<ThreadState> it = perThreadPool.getAllPerThreadsIterator();
- while (it.hasNext()) {
- it.next().perThread.setInfoStream(infoStream);
- }
- }
-
/** Returns how many docs are currently buffered in RAM. */
int getNumDocs() {
return numDocsInRAM.get();
@@ -204,14 +198,6 @@ final class DocumentsWriter {
return abortedFiles;
}
- // returns boolean for asserts
- boolean message(String message) {
- if (infoStream != null) {
- indexWriter.message("DW: " + message);
- }
- return true;
- }
-
private void ensureOpen() throws AlreadyClosedException {
if (closed) {
throw new AlreadyClosedException("this IndexWriter is closed");
@@ -231,7 +217,7 @@ final class DocumentsWriter {
try {
if (infoStream != null) {
- message("DW: abort");
+ infoStream.message("DW", "abort");
}
final Iterator<ThreadState> threadsIterator = perThreadPool.getActivePerThreadsIterator();
@@ -258,14 +244,14 @@ final class DocumentsWriter {
success = true;
} finally {
if (infoStream != null) {
- message("docWriter: done abort; abortedFiles=" + abortedFiles + " success=" + success);
+ infoStream.message("DW", "done abort; abortedFiles=" + abortedFiles + " success=" + success);
}
}
}
boolean anyChanges() {
if (infoStream != null) {
- message("docWriter: anyChanges? numDocsInRam=" + numDocsInRAM.get()
+ infoStream.message("DW", "anyChanges? numDocsInRam=" + numDocsInRAM.get()
+ " deletes=" + anyDeletions() + " hasTickets:"
+ ticketQueue.hasTickets() + " pendingChangesInFullFlush: "
+ pendingChangesInCurrentFullFlush);
@@ -304,7 +290,7 @@ final class DocumentsWriter {
if (flushControl.anyStalledThreads() || flushControl.numQueuedFlushes() > 0) {
// Help out flushing any queued DWPTs so we can un-stall:
if (infoStream != null) {
- message("docWriter: DocumentsWriter has queued dwpt; will hijack this thread to flush pending segment(s)");
+ infoStream.message("DW", "DocumentsWriter has queued dwpt; will hijack this thread to flush pending segment(s)");
}
do {
// Try pick up pending threads here if possible
@@ -315,14 +301,14 @@ final class DocumentsWriter {
}
if (infoStream != null && flushControl.anyStalledThreads()) {
- message("WARNING DocumentsWriter has stalled threads; waiting");
+ infoStream.message("DW", "WARNING DocumentsWriter has stalled threads; waiting");
}
flushControl.waitIfStalled(); // block if stalled
} while (flushControl.numQueuedFlushes() != 0); // still queued DWPTs try help flushing
if (infoStream != null) {
- message("continue indexing after helpling out flushing DocumentsWriter is healthy");
+ infoStream.message("DW", "continue indexing after helping out flushing DocumentsWriter is healthy");
}
}
return maybeMerge;
@@ -481,7 +467,7 @@ final class DocumentsWriter {
if (ramBufferSizeMB != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
flushControl.getDeleteBytesUsed() > (1024*1024*ramBufferSizeMB/2)) {
if (infoStream != null) {
- message("force apply deletes bytesUsed=" + flushControl.getDeleteBytesUsed() + " vs ramBuffer=" + (1024*1024*ramBufferSizeMB));
+ infoStream.message("DW", "force apply deletes bytesUsed=" + flushControl.getDeleteBytesUsed() + " vs ramBuffer=" + (1024*1024*ramBufferSizeMB));
}
applyAllDeletes(deleteQueue);
}
@@ -515,7 +501,7 @@ final class DocumentsWriter {
if (bufferedDeletes != null && bufferedDeletes.any()) {
indexWriter.publishFrozenDeletes(bufferedDeletes);
if (infoStream != null) {
- message("flush: push buffered deletes: " + bufferedDeletes);
+ infoStream.message("DW", "flush: push buffered deletes: " + bufferedDeletes);
}
}
} else {
@@ -542,14 +528,14 @@ final class DocumentsWriter {
final SegmentInfo segInfo = indexWriter.prepareFlushedSegment(newSegment);
final BufferedDeletes deletes = newSegment.segmentDeletes;
if (infoStream != null) {
- message(Thread.currentThread().getName() + ": publishFlushedSegment seg-private deletes=" + deletes);
+ infoStream.message("DW", Thread.currentThread().getName() + ": publishFlushedSegment seg-private deletes=" + deletes);
}
FrozenBufferedDeletes packet = null;
if (deletes != null && deletes.any()) {
// Segment private delete
packet = new FrozenBufferedDeletes(deletes, true);
if (infoStream != null) {
- message("flush: push buffered seg private deletes: " + packet);
+ infoStream.message("DW", "flush: push buffered seg private deletes: " + packet);
}
}
@@ -575,7 +561,7 @@ final class DocumentsWriter {
throws IOException {
final DocumentsWriterDeleteQueue flushingDeleteQueue;
if (infoStream != null) {
- message(Thread.currentThread().getName() + " startFullFlush");
+ infoStream.message("DW", Thread.currentThread().getName() + " startFullFlush");
}
synchronized (this) {
@@ -601,7 +587,7 @@ final class DocumentsWriter {
flushControl.waitForFlush();
if (!anythingFlushed && flushingDeleteQueue.anyChanges()) { // apply deletes if we did not flush any document
if (infoStream != null) {
- message(Thread.currentThread().getName() + ": flush naked frozen global deletes");
+ infoStream.message("DW", Thread.currentThread().getName() + ": flush naked frozen global deletes");
}
synchronized (ticketQueue) {
ticketQueue.incTicketCount(); // first inc the ticket count - freeze opens a window for #anyChanges to fail
@@ -619,7 +605,7 @@ final class DocumentsWriter {
final void finishFullFlush(boolean success) {
try {
if (infoStream != null) {
- message(Thread.currentThread().getName() + " finishFullFlush success=" + success);
+ infoStream.message("DW", Thread.currentThread().getName() + " finishFullFlush success=" + success);
}
assert setFlushingDeleteQueue(null);
if (success) {
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java Sun Nov 6 19:09:08 2011
@@ -454,7 +454,7 @@ public final class DocumentsWriterFlushC
void addFlushableState(ThreadState perThread) {
if (documentsWriter.infoStream != null) {
- documentsWriter.message("FC: " + Thread.currentThread().getName() + ": addFlushableState " + perThread.perThread);
+ documentsWriter.infoStream.message("DWFC", Thread.currentThread().getName() + ": addFlushableState " + perThread.perThread);
}
final DocumentsWriterPerThread dwpt = perThread.perThread;
assert perThread.isHeldByCurrentThread();
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java Sun Nov 6 19:09:08 2011
@@ -35,6 +35,7 @@ import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.ByteBlockPool.Allocator;
import org.apache.lucene.util.ByteBlockPool.DirectTrackingAllocator;
+import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.RamUsageEstimator;
public class DocumentsWriterPerThread {
@@ -131,7 +132,7 @@ public class DocumentsWriterPerThread {
hasAborted = aborting = true;
try {
if (infoStream != null) {
- message("now abort");
+ infoStream.message("DWPT", "now abort");
}
try {
consumer.abort();
@@ -146,7 +147,7 @@ public class DocumentsWriterPerThread {
} finally {
aborting = false;
if (infoStream != null) {
- message("done abort");
+ infoStream.message("DWPT", "done abort");
}
}
}
@@ -167,7 +168,7 @@ public class DocumentsWriterPerThread {
boolean hasAborted = false; // True if the last exception throws by #updateDocument was aborting
private FieldInfos fieldInfos;
- private PrintStream infoStream;
+ private final InfoStream infoStream;
private int numDocsInRAM;
private int flushedDocCount;
DocumentsWriterDeleteQueue deleteQueue;
@@ -225,13 +226,13 @@ public class DocumentsWriterPerThread {
// this call is synchronized on IndexWriter.segmentInfos
segment = writer.newSegmentName();
assert numDocsInRAM == 0;
- if (INFO_VERBOSE) {
- message(Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue);
+ if (INFO_VERBOSE && infoStream != null) {
+ infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue);
}
}
- if (INFO_VERBOSE) {
- message(Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segment);
+ if (INFO_VERBOSE && infoStream != null) {
+ infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segment);
}
boolean success = false;
try {
@@ -273,12 +274,12 @@ public class DocumentsWriterPerThread {
// this call is synchronized on IndexWriter.segmentInfos
segment = writer.newSegmentName();
assert numDocsInRAM == 0;
- if (INFO_VERBOSE) {
- message(Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue);
+ if (INFO_VERBOSE && infoStream != null) {
+ infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue);
}
}
- if (INFO_VERBOSE) {
- message(Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segment);
+ if (INFO_VERBOSE && infoStream != null) {
+ infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segment);
}
int docCount = 0;
try {
@@ -459,12 +460,12 @@ public class DocumentsWriterPerThread {
}
if (infoStream != null) {
- message("flush postings as segment " + flushState.segmentName + " numDocs=" + numDocsInRAM);
+ infoStream.message("DWPT", "flush postings as segment " + flushState.segmentName + " numDocs=" + numDocsInRAM);
}
if (aborting) {
if (infoStream != null) {
- message("flush: skip because aborting is set");
+ infoStream.message("DWPT", "flush: skip because aborting is set");
}
return null;
}
@@ -476,10 +477,10 @@ public class DocumentsWriterPerThread {
pendingDeletes.terms.clear();
final SegmentInfo newSegment = new SegmentInfo(segment, flushState.numDocs, directory, false, flushState.codec, fieldInfos.asReadOnly());
if (infoStream != null) {
- message("new segment has " + (flushState.liveDocs == null ? 0 : (flushState.numDocs - flushState.liveDocs.count())) + " deleted docs");
- message("new segment has " + (newSegment.getHasVectors() ? "vectors" : "no vectors"));
- message("flushedFiles=" + newSegment.files());
- message("flushed codec=" + newSegment.getCodec());
+ infoStream.message("DWPT", "new segment has " + (flushState.liveDocs == null ? 0 : (flushState.numDocs - flushState.liveDocs.count())) + " deleted docs");
+ infoStream.message("DWPT", "new segment has " + (newSegment.getHasVectors() ? "vectors" : "no vectors"));
+ infoStream.message("DWPT", "flushedFiles=" + newSegment.files());
+ infoStream.message("DWPT", "flushed codec=" + newSegment.getCodec());
}
flushedDocCount += flushState.numDocs;
@@ -495,7 +496,7 @@ public class DocumentsWriterPerThread {
if (infoStream != null) {
final double newSegmentSizeNoStore = newSegment.sizeInBytes(false)/1024./1024.;
final double newSegmentSize = newSegment.sizeInBytes(true)/1024./1024.;
- message("flushed: segment=" + newSegment +
+ infoStream.message("DWPT", "flushed: segment=" + newSegment +
" ramUsed=" + nf.format(startMBUsed) + " MB" +
" newFlushedSize=" + nf.format(newSegmentSize) + " MB" +
" (" + nf.format(newSegmentSizeNoStore) + " MB w/o doc stores)" +
@@ -527,10 +528,6 @@ public class DocumentsWriterPerThread {
return bytesUsed.get() + pendingDeletes.bytesUsed.get();
}
- void message(String message) {
- writer.message("DWPT: " + message);
- }
-
/* Initial chunks size of the shared byte[] blocks used to
store postings data */
final static int BYTE_BLOCK_NOT_MASK = ~BYTE_BLOCK_MASK;
@@ -561,11 +558,6 @@ public class DocumentsWriterPerThread {
return new PerDocWriteState(infoStream, directory, segment, fieldInfos, bytesUsed, segmentSuffix, IOContext.DEFAULT);
}
- void setInfoStream(PrintStream infoStream) {
- this.infoStream = infoStream;
- docState.infoStream = infoStream;
- }
-
@Override
public String toString() {
return "DocumentsWriterPerThread [pendingDeletes=" + pendingDeletes
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java Sun Nov 6 19:09:08 2011
@@ -64,7 +64,7 @@ public class FlushByRamOrCountsPolicy ex
control.getDeleteBytesUsed() > (1024*1024*indexWriterConfig.getRAMBufferSizeMB()))) {
control.setApplyAllDeletes();
if (writer.infoStream != null) {
- writer.message("force apply deletes bytesUsed=" + control.getDeleteBytesUsed() + " vs ramBuffer=" + (1024*1024*indexWriterConfig.getRAMBufferSizeMB()));
+ writer.infoStream.message("FP", "force apply deletes bytesUsed=" + control.getDeleteBytesUsed() + " vs ramBuffer=" + (1024*1024*indexWriterConfig.getRAMBufferSizeMB()));
}
}
}
@@ -82,7 +82,7 @@ public class FlushByRamOrCountsPolicy ex
if (totalRam >= limit) {
final DocumentsWriter writer = this.writer.get();
if (writer.infoStream != null) {
- writer.message("flush: activeBytes=" + control.activeBytes() + " deleteBytes=" + control.getDeleteBytesUsed() + " vs limit=" + limit);
+ writer.infoStream.message("FP", "flush: activeBytes=" + control.activeBytes() + " deleteBytes=" + control.getDeleteBytesUsed() + " vs limit=" + limit);
}
markLargestWriterPending(control, state, totalRam);
}
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushPolicy.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushPolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/FlushPolicy.java Sun Nov 6 19:09:08 2011
@@ -20,6 +20,7 @@ import java.util.Iterator;
import org.apache.lucene.index.DocumentsWriterPerThreadPool.ThreadState;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.SetOnce;
/**
@@ -123,9 +124,16 @@ public abstract class FlushPolicy {
}
}
}
- assert writer.get().message(
- "set largest ram consuming thread pending on lower watermark");
+ assert assertMessage("set largest ram consuming thread pending on lower watermark");
return maxRamUsingThreadState;
}
+
+ private boolean assertMessage(String s) {
+ InfoStream infoStream = writer.get().infoStream;
+ if (infoStream != null) {
+ infoStream.message("FP", s);
+ }
+ return true;
+ }
}
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java Sun Nov 6 19:09:08 2011
@@ -31,6 +31,7 @@ import java.util.Map;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NoSuchDirectoryException;
import org.apache.lucene.util.CollectionUtil;
+import org.apache.lucene.util.InfoStream;
/*
* This class keeps track of each SegmentInfos instance that
@@ -95,7 +96,7 @@ final class IndexFileDeleter {
/* Commits that the IndexDeletionPolicy have decided to delete: */
private List<CommitPoint> commitsToDelete = new ArrayList<CommitPoint>();
- private PrintStream infoStream;
+ private final InfoStream infoStream;
private Directory directory;
private IndexDeletionPolicy policy;
@@ -109,17 +110,6 @@ final class IndexFileDeleter {
// Used only for assert
private final IndexWriter writer;
- void setInfoStream(PrintStream infoStream) {
- this.infoStream = infoStream;
- if (infoStream != null) {
- message("setInfoStream deletionPolicy=" + policy);
- }
- }
-
- private void message(String message) {
- infoStream.println("IFD [" + new Date() + "; " + Thread.currentThread().getName() + "]: " + message);
- }
-
// called only from assert
private boolean locked() {
return writer == null || Thread.holdsLock(writer);
@@ -134,14 +124,14 @@ final class IndexFileDeleter {
* @throws IOException if there is a low-level IO error
*/
public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos,
- PrintStream infoStream, IndexWriter writer) throws CorruptIndexException, IOException {
+ InfoStream infoStream, IndexWriter writer) throws CorruptIndexException, IOException {
this.infoStream = infoStream;
this.writer = writer;
final String currentSegmentsFile = segmentInfos.getCurrentSegmentFileName();
if (infoStream != null) {
- message("init: current segments file is \"" + currentSegmentsFile + "\"; deletionPolicy=" + policy);
+ infoStream.message("IFD", "init: current segments file is \"" + currentSegmentsFile + "\"; deletionPolicy=" + policy);
}
this.policy = policy;
@@ -173,7 +163,7 @@ final class IndexFileDeleter {
// it's valid (<= the max gen). Load it, then
// incref all files it refers to:
if (infoStream != null) {
- message("init: load commit \"" + fileName + "\"");
+ infoStream.message("IFD", "init: load commit \"" + fileName + "\"");
}
SegmentInfos sis = new SegmentInfos();
try {
@@ -187,7 +177,7 @@ final class IndexFileDeleter {
// doesn't. So, we catch this and handle it
// as if the file does not exist
if (infoStream != null) {
- message("init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
+ infoStream.message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
}
sis = null;
} catch (IOException e) {
@@ -218,7 +208,7 @@ final class IndexFileDeleter {
refresh(segmentInfo.name);
sis = null;
if (infoStream != null) {
- message("init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
+ infoStream.message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
}
}
}
@@ -255,7 +245,7 @@ final class IndexFileDeleter {
throw new CorruptIndexException("failed to locate current segments_N file");
}
if (infoStream != null) {
- message("forced open of current segments file " + segmentInfos.getCurrentSegmentFileName());
+ infoStream.message("IFD", "forced open of current segments file " + segmentInfos.getCurrentSegmentFileName());
}
currentCommitPoint = new CommitPoint(commitsToDelete, directory, sis);
commits.add(currentCommitPoint);
@@ -273,7 +263,7 @@ final class IndexFileDeleter {
final String fileName = entry.getKey();
if (0 == rc.count) {
if (infoStream != null) {
- message("init: removing unreferenced file \"" + fileName + "\"");
+ infoStream.message("IFD", "init: removing unreferenced file \"" + fileName + "\"");
}
deleteFile(fileName);
}
@@ -313,7 +303,7 @@ final class IndexFileDeleter {
for(int i=0;i<size;i++) {
CommitPoint commit = commitsToDelete.get(i);
if (infoStream != null) {
- message("deleteCommits: now decRef commit \"" + commit.getSegmentsFileName() + "\"");
+ infoStream.message("IFD", "deleteCommits: now decRef commit \"" + commit.getSegmentsFileName() + "\"");
}
for (final String file : commit.files) {
decRef(file);
@@ -373,7 +363,7 @@ final class IndexFileDeleter {
!fileName.equals(IndexFileNames.SEGMENTS_GEN)) {
// Unreferenced file, so remove it
if (infoStream != null) {
- message("refresh [prefix=" + segmentName + "]: removing newly created unreferenced file \"" + fileName + "\"");
+ infoStream.message("IFD", "refresh [prefix=" + segmentName + "]: removing newly created unreferenced file \"" + fileName + "\"");
}
deleteFile(fileName);
}
@@ -415,7 +405,7 @@ final class IndexFileDeleter {
void revisitPolicy() throws IOException {
assert locked();
if (infoStream != null) {
- message("now revisitPolicy");
+ infoStream.message("IFD", "now revisitPolicy");
}
if (commits.size() > 0) {
@@ -432,7 +422,7 @@ final class IndexFileDeleter {
int size = oldDeletable.size();
for(int i=0;i<size;i++) {
if (infoStream != null) {
- message("delete pending file " + oldDeletable.get(i));
+ infoStream.message("IFD", "delete pending file " + oldDeletable.get(i));
}
deleteFile(oldDeletable.get(i));
}
@@ -463,7 +453,7 @@ final class IndexFileDeleter {
assert locked();
if (infoStream != null) {
- message("now checkpoint \"" + segmentInfos.toString(directory) + "\" [" + segmentInfos.size() + " segments " + "; isCommit = " + isCommit + "]");
+ infoStream.message("IFD", "now checkpoint \"" + segmentInfos.toString(directory) + "\" [" + segmentInfos.size() + " segments " + "; isCommit = " + isCommit + "]");
}
// Try again now to delete any previously un-deletable
@@ -514,7 +504,7 @@ final class IndexFileDeleter {
assert locked();
RefCount rc = getRefCount(fileName);
if (infoStream != null && VERBOSE_REF_COUNTS) {
- message(" IncRef \"" + fileName + "\": pre-incr count is " + rc.count);
+ infoStream.message("IFD", " IncRef \"" + fileName + "\": pre-incr count is " + rc.count);
}
rc.IncRef();
}
@@ -530,7 +520,7 @@ final class IndexFileDeleter {
assert locked();
RefCount rc = getRefCount(fileName);
if (infoStream != null && VERBOSE_REF_COUNTS) {
- message(" DecRef \"" + fileName + "\": pre-decr count is " + rc.count);
+ infoStream.message("IFD", " DecRef \"" + fileName + "\": pre-decr count is " + rc.count);
}
if (0 == rc.DecRef()) {
// This file is no longer referenced by any past
@@ -582,7 +572,7 @@ final class IndexFileDeleter {
for (final String fileName: files) {
if (!refCounts.containsKey(fileName)) {
if (infoStream != null) {
- message("delete new file \"" + fileName + "\"");
+ infoStream.message("IFD", "delete new file \"" + fileName + "\"");
}
deleteFile(fileName);
}
@@ -594,7 +584,7 @@ final class IndexFileDeleter {
assert locked();
try {
if (infoStream != null) {
- message("delete \"" + fileName + "\"");
+ infoStream.message("IFD", "delete \"" + fileName + "\"");
}
directory.deleteFile(fileName);
} catch (IOException e) { // if delete fails
@@ -608,7 +598,7 @@ final class IndexFileDeleter {
// the file for subsequent deletion.
if (infoStream != null) {
- message("unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
+ infoStream.message("IFD", "unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
}
if (deletable == null) {
deletable = new ArrayList<String>();
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexUpgrader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexUpgrader.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexUpgrader.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexUpgrader.java Sun Nov 6 19:09:08 2011
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.Version;
import java.io.File;
@@ -86,30 +87,28 @@ public final class IndexUpgrader {
}
private final Directory dir;
- private final PrintStream infoStream;
private final IndexWriterConfig iwc;
private final boolean deletePriorCommits;
/** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
* {@code matchVersion}. The tool refuses to upgrade indexes with multiple commit points. */
public IndexUpgrader(Directory dir, Version matchVersion) {
- this(dir, new IndexWriterConfig(matchVersion, null), null, false);
+ this(dir, new IndexWriterConfig(matchVersion, null), false);
}
/** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
* {@code matchVersion}. You have the possibility to upgrade indexes with multiple commit points by removing
* all older ones. If {@code infoStream} is not {@code null}, all logging output will be sent to this stream. */
public IndexUpgrader(Directory dir, Version matchVersion, PrintStream infoStream, boolean deletePriorCommits) {
- this(dir, new IndexWriterConfig(matchVersion, null), infoStream, deletePriorCommits);
+ this(dir, new IndexWriterConfig(matchVersion, null).setInfoStream(infoStream), deletePriorCommits);
}
/** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
* config. You have the possibility to upgrade indexes with multiple commit points by removing
- * all older ones. If {@code infoStream} is not {@code null}, all logging output will be sent to this stream. */
- public IndexUpgrader(Directory dir, IndexWriterConfig iwc, PrintStream infoStream, boolean deletePriorCommits) {
+ * all older ones. */
+ public IndexUpgrader(Directory dir, IndexWriterConfig iwc, boolean deletePriorCommits) {
this.dir = dir;
this.iwc = iwc;
- this.infoStream = infoStream;
this.deletePriorCommits = deletePriorCommits;
}
@@ -131,10 +130,14 @@ public final class IndexUpgrader {
final IndexWriter w = new IndexWriter(dir, c);
try {
- w.setInfoStream(infoStream);
- w.message("Upgrading all pre-" + Constants.LUCENE_MAIN_VERSION + " segments of index directory '" + dir + "' to version " + Constants.LUCENE_MAIN_VERSION + "...");
+ InfoStream infoStream = c.getInfoStream();
+ if (infoStream != null) {
+ infoStream.message("IndexUpgrader", "Upgrading all pre-" + Constants.LUCENE_MAIN_VERSION + " segments of index directory '" + dir + "' to version " + Constants.LUCENE_MAIN_VERSION + "...");
+ }
w.optimize();
- w.message("All segments upgraded to version " + Constants.LUCENE_MAIN_VERSION);
+ if (infoStream != null) {
+ infoStream.message("IndexUpgrader", "All segments upgraded to version " + Constants.LUCENE_MAIN_VERSION);
+ }
} finally {
w.close();
}
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexWriter.java?rev=1198537&r1=1198536&r2=1198537&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/IndexWriter.java Sun Nov 6 19:09:08 2011
@@ -19,12 +19,10 @@ package org.apache.lucene.index;
import java.io.Closeable;
import java.io.IOException;
-import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
-import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
@@ -52,6 +50,7 @@ import org.apache.lucene.store.MergeInfo
import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.util.MapBackedSet;
@@ -205,9 +204,6 @@ public class IndexWriter implements Clos
* #setInfoStream}).
*/
public final static int MAX_TERM_LENGTH = DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8;
- // Used for printing messages
- private static final AtomicInteger MESSAGE_ID = new AtomicInteger();
- private int messageID = MESSAGE_ID.getAndIncrement();
volatile private boolean hitOOM;
private final Directory directory; // where this index resides
@@ -343,7 +339,7 @@ public class IndexWriter implements Clos
final long tStart = System.currentTimeMillis();
if (infoStream != null) {
- message("flush at getReader");
+ infoStream.message("IW", "flush at getReader");
}
// Do this up front before flushing so that the readers
// obtained during this flush are pooled, the first time
@@ -376,7 +372,7 @@ public class IndexWriter implements Clos
maybeApplyDeletes(applyAllDeletes);
r = new DirectoryReader(this, segmentInfos, applyAllDeletes);
if (infoStream != null) {
- message("return reader version=" + r.getVersion() + " reader=" + r);
+ infoStream.message("IW", "return reader version=" + r.getVersion() + " reader=" + r);
}
}
} catch (OutOfMemoryError oom) {
@@ -385,7 +381,7 @@ public class IndexWriter implements Clos
return null;
} finally {
if (!success && infoStream != null) {
- message("hit exception during NRT reader");
+ infoStream.message("IW", "hit exception during NRT reader");
}
// Done: finish the full flush!
docWriter.finishFullFlush(success);
@@ -396,7 +392,7 @@ public class IndexWriter implements Clos
maybeMerge();
}
if (infoStream != null) {
- message("getReader took " + (System.currentTimeMillis() - tStart) + " msec");
+ infoStream.message("IW", "getReader took " + (System.currentTimeMillis() - tStart) + " msec");
}
return r;
}
@@ -791,16 +787,6 @@ public class IndexWriter implements Clos
ensureOpen(true);
}
- /**
- * Prints a message to the infoStream (if non-null),
- * prefixed with the identifying information for this
- * writer and the thread that's calling it.
- */
- public void message(String message) {
- if (infoStream != null)
- infoStream.println("IW " + messageID + " [" + new Date() + "; " + Thread.currentThread().getName() + "]: " + message);
- }
-
final Codec codec; // for writing new segments
/**
@@ -832,14 +818,13 @@ public class IndexWriter implements Clos
config = (IndexWriterConfig) conf.clone();
directory = d;
analyzer = conf.getAnalyzer();
- infoStream = defaultInfoStream;
+ infoStream = conf.getInfoStream();
mergePolicy = conf.getMergePolicy();
mergePolicy.setIndexWriter(this);
mergeScheduler = conf.getMergeScheduler();
codec = conf.getCodec();
- bufferedDeletesStream = new BufferedDeletesStream(messageID);
- bufferedDeletesStream.setInfoStream(infoStream);
+ bufferedDeletesStream = new BufferedDeletesStream(infoStream);
poolReaders = conf.getReaderPooling();
writeLock = directory.makeLock(WRITE_LOCK_NAME);
@@ -897,7 +882,7 @@ public class IndexWriter implements Clos
changeCount++;
segmentInfos.changed();
if (infoStream != null)
- message("init: loaded commit \"" + commit.getSegmentsFileName() + "\"");
+ infoStream.message("IW", "init: loaded commit \"" + commit.getSegmentsFileName() + "\"");
}
}
@@ -906,7 +891,6 @@ public class IndexWriter implements Clos
// start with previous field numbers, but new FieldInfos
globalFieldNumberMap = segmentInfos.getOrLoadGlobalFieldNumberMap(directory);
docWriter = new DocumentsWriter(codec, config, directory, this, globalFieldNumberMap, bufferedDeletesStream);
- docWriter.setInfoStream(infoStream);
// Default deleter (for backwards compatibility) is
// KeepOnlyLastCommitDeleter:
@@ -926,7 +910,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null) {
- message("init: create=" + create);
+ infoStream.message("IW", "init: create=" + create);
messageState();
}
@@ -935,7 +919,7 @@ public class IndexWriter implements Clos
} finally {
if (!success) {
if (infoStream != null) {
- message("init: hit exception on init; releasing write lock");
+ infoStream.message("IW", "init: hit exception on init; releasing write lock");
}
try {
writeLock.release();
@@ -963,51 +947,13 @@ public class IndexWriter implements Clos
return config;
}
- /** If non-null, this will be the default infoStream used
- * by a newly instantiated IndexWriter.
- * @see #setInfoStream
- */
- public static void setDefaultInfoStream(PrintStream infoStream) {
- IndexWriter.defaultInfoStream = infoStream;
- }
-
- /**
- * Returns the current default infoStream for newly
- * instantiated IndexWriters.
- * @see #setDefaultInfoStream
- */
- public static PrintStream getDefaultInfoStream() {
- return IndexWriter.defaultInfoStream;
- }
-
- /** If non-null, information about merges, deletes and a
- * message when maxFieldLength is reached will be printed
- * to this.
- */
- public void setInfoStream(PrintStream infoStream) throws IOException {
- ensureOpen();
- this.infoStream = infoStream;
- docWriter.setInfoStream(infoStream);
- deleter.setInfoStream(infoStream);
- bufferedDeletesStream.setInfoStream(infoStream);
- if (infoStream != null)
- messageState();
- }
-
private void messageState() throws IOException {
- message("\ndir=" + directory + "\n" +
+ if (infoStream != null) {
+ infoStream.message("IW", "\ndir=" + directory + "\n" +
"index=" + segString() + "\n" +
"version=" + Constants.LUCENE_VERSION + "\n" +
config.toString());
- }
-
- /**
- * Returns the current infoStream in use by this writer.
- * @see #setInfoStream
- */
- public PrintStream getInfoStream() {
- ensureOpen();
- return infoStream;
+ }
}
/** Returns true if verbosing is enabled (i.e., infoStream != null). */
@@ -1122,7 +1068,7 @@ public class IndexWriter implements Clos
try {
if (infoStream != null) {
- message("now flush at close waitForMerges=" + waitForMerges);
+ infoStream.message("IW", "now flush at close waitForMerges=" + waitForMerges);
}
docWriter.close();
@@ -1148,14 +1094,14 @@ public class IndexWriter implements Clos
mergeScheduler.close();
if (infoStream != null)
- message("now call final commit()");
+ infoStream.message("IW", "now call final commit()");
if (!hitOOM) {
commitInternal(null);
}
if (infoStream != null)
- message("at close: " + segString());
+ infoStream.message("IW", "at close: " + segString());
// used by assert below
final DocumentsWriter oldWriter = docWriter;
synchronized(this) {
@@ -1180,7 +1126,7 @@ public class IndexWriter implements Clos
notifyAll();
if (!closed) {
if (infoStream != null)
- message("hit exception while closing");
+ infoStream.message("IW", "hit exception while closing");
}
}
}
@@ -1410,7 +1356,7 @@ public class IndexWriter implements Clos
success = true;
} finally {
if (!success && infoStream != null) {
- message("hit exception updating document");
+ infoStream.message("IW", "hit exception updating document");
}
}
if (anySegmentFlushed) {
@@ -1557,7 +1503,7 @@ public class IndexWriter implements Clos
success = true;
} finally {
if (!success && infoStream != null)
- message("hit exception updating document");
+ infoStream.message("IW", "hit exception updating document");
}
if (anySegmentFlushed) {
@@ -1619,8 +1565,7 @@ public class IndexWriter implements Clos
/** If non-null, information about merges will be printed to this.
*/
- private PrintStream infoStream;
- private static PrintStream defaultInfoStream;
+ final InfoStream infoStream;
/**
* Requests an "optimize" operation on an index, priming the index
@@ -1737,8 +1682,8 @@ public class IndexWriter implements Clos
throw new IllegalArgumentException("maxNumSegments must be >= 1; got " + maxNumSegments);
if (infoStream != null) {
- message("optimize: index now " + segString());
- message("now flush at optimize");
+ infoStream.message("IW", "optimize: index now " + segString());
+ infoStream.message("IW", "now flush at optimize");
}
flush(true, true);
@@ -1849,7 +1794,7 @@ public class IndexWriter implements Clos
flush(true, true);
if (infoStream != null)
- message("expungeDeletes: index now " + segString());
+ infoStream.message("IW", "expungeDeletes: index now " + segString());
MergePolicy.MergeSpecification spec;
@@ -2057,7 +2002,7 @@ public class IndexWriter implements Clos
boolean success = false;
if (infoStream != null ) {
- message("rollback");
+ infoStream.message("IW", "rollback");
}
try {
@@ -2067,7 +2012,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null ) {
- message("rollback: done finish merges");
+ infoStream.message("IW", "rollback: done finish merges");
}
// Must pre-close these two, in case they increment
@@ -2094,7 +2039,7 @@ public class IndexWriter implements Clos
// once").
segmentInfos.rollbackSegmentInfos(rollbackSegments);
if (infoStream != null ) {
- message("rollback: infos=" + segString(segmentInfos));
+ infoStream.message("IW", "rollback: infos=" + segString(segmentInfos));
}
docWriter.abort();
@@ -2121,7 +2066,7 @@ public class IndexWriter implements Clos
closing = false;
notifyAll();
if (infoStream != null)
- message("hit exception during rollback");
+ infoStream.message("IW", "hit exception during rollback");
}
}
}
@@ -2174,7 +2119,7 @@ public class IndexWriter implements Clos
handleOOM(oom, "deleteAll");
} finally {
if (!success && infoStream != null) {
- message("hit exception during deleteAll");
+ infoStream.message("IW", "hit exception during deleteAll");
}
}
}
@@ -2187,7 +2132,7 @@ public class IndexWriter implements Clos
// Abort all pending & running merges:
for (final MergePolicy.OneMerge merge : pendingMerges) {
if (infoStream != null)
- message("now abort pending merge " + merge.segString(directory));
+ infoStream.message("IW", "now abort pending merge " + merge.segString(directory));
merge.abort();
mergeFinish(merge);
}
@@ -2195,7 +2140,7 @@ public class IndexWriter implements Clos
for (final MergePolicy.OneMerge merge : runningMerges) {
if (infoStream != null)
- message("now abort running merge " + merge.segString(directory));
+ infoStream.message("IW", "now abort running merge " + merge.segString(directory));
merge.abort();
}
@@ -2206,7 +2151,7 @@ public class IndexWriter implements Clos
// they are aborted.
while(runningMerges.size() > 0) {
if (infoStream != null)
- message("now wait for " + runningMerges.size() + " running merge to abort");
+ infoStream.message("IW", "now wait for " + runningMerges.size() + " running merge to abort");
doWait();
}
@@ -2216,7 +2161,7 @@ public class IndexWriter implements Clos
assert 0 == mergingSegments.size();
if (infoStream != null)
- message("all running merges have aborted");
+ infoStream.message("IW", "all running merges have aborted");
} else {
// waitForMerges() will ensure any running addIndexes finishes.
@@ -2237,7 +2182,7 @@ public class IndexWriter implements Clos
public synchronized void waitForMerges() {
ensureOpen(false);
if (infoStream != null) {
- message("waitForMerges");
+ infoStream.message("IW", "waitForMerges");
}
while(pendingMerges.size() > 0 || runningMerges.size() > 0) {
doWait();
@@ -2247,7 +2192,7 @@ public class IndexWriter implements Clos
assert 0 == mergingSegments.size();
if (infoStream != null) {
- message("waitForMerges done");
+ infoStream.message("IW", "waitForMerges done");
}
}
@@ -2284,7 +2229,9 @@ public class IndexWriter implements Clos
try {
if (useCompoundFile(newSegment)) {
String compoundFileName = IndexFileNames.segmentFileName(newSegment.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION);
- message("creating compound file " + compoundFileName);
+ if (infoStream != null) {
+ infoStream.message("IW", "creating compound file " + compoundFileName);
+ }
// Now build compound file
final Directory cfsDir = new CompoundFileDirectory(directory, compoundFileName, context, true);
IOException prior = null;
@@ -2315,7 +2262,7 @@ public class IndexWriter implements Clos
newSegment.advanceDelGen();
final String delFileName = newSegment.getDelFileName();
if (infoStream != null) {
- message("flush: write " + delCount + " deletes to " + delFileName);
+ infoStream.message("IW", "flush: write " + delCount + " deletes to " + delFileName);
}
boolean success2 = false;
try {
@@ -2342,7 +2289,7 @@ public class IndexWriter implements Clos
} finally {
if (!success) {
if (infoStream != null) {
- message("hit exception " +
+ infoStream.message("IW", "hit exception " +
"creating compound file for newly flushed segment " + newSegment.name);
}
@@ -2374,7 +2321,7 @@ public class IndexWriter implements Clos
// Lock order IW -> BDS
synchronized (bufferedDeletesStream) {
if (infoStream != null) {
- message("publishFlushedSegment");
+ infoStream.message("IW", "publishFlushedSegment");
}
if (globalPacket != null && globalPacket.any()) {
@@ -2391,7 +2338,7 @@ public class IndexWriter implements Clos
nextGen = bufferedDeletesStream.getNextGen();
}
if (infoStream != null) {
- message("publish sets newSegment delGen=" + nextGen);
+ infoStream.message("IW", "publish sets newSegment delGen=" + nextGen);
}
newSegment.setBufferedDeletesGen(nextGen);
segmentInfos.add(newSegment);
@@ -2473,7 +2420,7 @@ public class IndexWriter implements Clos
try {
if (infoStream != null)
- message("flush at addIndexes(Directory...)");
+ infoStream.message("IW", "flush at addIndexes(Directory...)");
flush(false, true);
int docCount = 0;
@@ -2481,7 +2428,7 @@ public class IndexWriter implements Clos
Comparator<String> versionComparator = StringHelper.getVersionComparator();
for (Directory dir : dirs) {
if (infoStream != null) {
- message("addIndexes: process directory " + dir);
+ infoStream.message("IW", "addIndexes: process directory " + dir);
}
SegmentInfos sis = new SegmentInfos(); // read infos from dir
sis.read(dir);
@@ -2495,7 +2442,7 @@ public class IndexWriter implements Clos
String dsName = info.getDocStoreSegment();
if (infoStream != null) {
- message("addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName + " info=" + info);
+ infoStream.message("IW", "addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName + " info=" + info);
}
// create CFS only if the source segment is not CFS, and MP agrees it
@@ -2561,7 +2508,7 @@ public class IndexWriter implements Clos
try {
if (infoStream != null)
- message("flush at addIndexes(IndexReader...)");
+ infoStream.message("IW", "flush at addIndexes(IndexReader...)");
flush(false, true);
String mergedName = newSegmentName();
@@ -2572,7 +2519,7 @@ public class IndexWriter implements Clos
// TODO: somehow we should fix this merge so it's
// abortable so that IW.close(false) is able to stop it
- SegmentMerger merger = new SegmentMerger(directory, config.getTermIndexInterval(),
+ SegmentMerger merger = new SegmentMerger(infoStream, directory, config.getTermIndexInterval(),
mergedName, null, payloadProcessorProvider,
new FieldInfos(globalFieldNumberMap), codec, context);
@@ -2753,8 +2700,8 @@ public class IndexWriter implements Clos
ensureOpen(false);
if (infoStream != null) {
- message("prepareCommit: flush");
- message(" index before flush " + segString());
+ infoStream.message("IW", "prepareCommit: flush");
+ infoStream.message("IW", " index before flush " + segString());
}
if (hitOOM) {
@@ -2812,7 +2759,7 @@ public class IndexWriter implements Clos
success = true;
} finally {
if (!success && infoStream != null) {
- message("hit exception during prepareCommit");
+ infoStream.message("IW", "hit exception during prepareCommit");
}
// Done: finish the full flush!
docWriter.finishFullFlush(flushSuccess);
@@ -2896,21 +2843,21 @@ public class IndexWriter implements Clos
private final void commitInternal(Map<String,String> commitUserData) throws CorruptIndexException, IOException {
if (infoStream != null) {
- message("commit: start");
+ infoStream.message("IW", "commit: start");
}
synchronized(commitLock) {
if (infoStream != null) {
- message("commit: enter lock");
+ infoStream.message("IW", "commit: enter lock");
}
if (pendingCommit == null) {
if (infoStream != null) {
- message("commit: now prepare");
+ infoStream.message("IW", "commit: now prepare");
}
prepareCommit(commitUserData);
} else if (infoStream != null) {
- message("commit: already prepared");
+ infoStream.message("IW", "commit: already prepared");
}
finishCommit();
@@ -2922,10 +2869,10 @@ public class IndexWriter implements Clos
if (pendingCommit != null) {
try {
if (infoStream != null)
- message("commit: pendingCommit != null");
+ infoStream.message("IW", "commit: pendingCommit != null");
pendingCommit.finishCommit(directory, codec);
if (infoStream != null)
- message("commit: wrote segments file \"" + pendingCommit.getCurrentSegmentFileName() + "\"");
+ infoStream.message("IW", "commit: wrote segments file \"" + pendingCommit.getCurrentSegmentFileName() + "\"");
lastCommitChangeCount = pendingCommitChangeCount;
segmentInfos.updateGeneration(pendingCommit);
segmentInfos.setUserData(pendingCommit.getUserData());
@@ -2939,11 +2886,11 @@ public class IndexWriter implements Clos
}
} else if (infoStream != null) {
- message("commit: pendingCommit == null; skip");
+ infoStream.message("IW", "commit: pendingCommit == null; skip");
}
if (infoStream != null) {
- message("commit: done");
+ infoStream.message("IW", "commit: done");
}
}
@@ -2985,8 +2932,8 @@ public class IndexWriter implements Clos
try {
if (infoStream != null) {
- message(" start flush: applyAllDeletes=" + applyAllDeletes);
- message(" index before flush " + segString());
+ infoStream.message("IW", " start flush: applyAllDeletes=" + applyAllDeletes);
+ infoStream.message("IW", " index before flush " + segString());
}
final boolean anySegmentFlushed;
@@ -3015,18 +2962,18 @@ public class IndexWriter implements Clos
return false;
} finally {
if (!success && infoStream != null)
- message("hit exception during flush");
+ infoStream.message("IW", "hit exception during flush");
}
}
final synchronized void maybeApplyDeletes(boolean applyAllDeletes) throws IOException {
if (applyAllDeletes) {
if (infoStream != null) {
- message("apply all deletes during flush");
+ infoStream.message("IW", "apply all deletes during flush");
}
applyAllDeletes();
} else if (infoStream != null) {
- message("don't apply deletes now delTermCount=" + bufferedDeletesStream.numTerms() + " bytesUsed=" + bufferedDeletesStream.bytesUsed());
+ infoStream.message("IW", "don't apply deletes now delTermCount=" + bufferedDeletesStream.numTerms() + " bytesUsed=" + bufferedDeletesStream.bytesUsed());
}
}
@@ -3039,7 +2986,7 @@ public class IndexWriter implements Clos
}
if (!keepFullyDeletedSegments && result.allDeleted != null) {
if (infoStream != null) {
- message("drop 100% deleted segments: " + segString(result.allDeleted));
+ infoStream.message("IW", "drop 100% deleted segments: " + segString(result.allDeleted));
}
for (SegmentInfo info : result.allDeleted) {
// If a merge has already registered for this
@@ -3104,7 +3051,7 @@ public class IndexWriter implements Clos
final List<SegmentInfo> sourceSegments = merge.segments;
if (infoStream != null)
- message("commitMergeDeletes " + merge.segString(directory));
+ infoStream.message("IW", "commitMergeDeletes " + merge.segString(directory));
// Carefully merge deletes that occurred after we
// started merging:
@@ -3202,7 +3149,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null)
- message("commitMerge: " + merge.segString(directory) + " index=" + segString());
+ infoStream.message("IW", "commitMerge: " + merge.segString(directory) + " index=" + segString());
assert merge.registerDone;
@@ -3214,7 +3161,7 @@ public class IndexWriter implements Clos
// abort this merge
if (merge.isAborted()) {
if (infoStream != null)
- message("commitMerge: skipping merge " + merge.segString(directory) + ": it was aborted");
+ infoStream.message("IW", "commitMerge: skipping merge " + merge.segString(directory) + ": it was aborted");
return false;
}
@@ -3230,7 +3177,7 @@ public class IndexWriter implements Clos
final boolean allDeleted = mergedReader.numDocs() == 0;
if (infoStream != null && allDeleted) {
- message("merged segment " + merge.info + " is 100% deleted" + (keepFullyDeletedSegments ? "" : "; skipping insert"));
+ infoStream.message("IW", "merged segment " + merge.info + " is 100% deleted" + (keepFullyDeletedSegments ? "" : "; skipping insert"));
}
final boolean dropSegment = allDeleted && !keepFullyDeletedSegments;
@@ -3241,7 +3188,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null) {
- message("after commit: " + segString());
+ infoStream.message("IW", "after commit: " + segString());
}
closeMergeReaders(merge, false);
@@ -3268,7 +3215,7 @@ public class IndexWriter implements Clos
final private void handleMergeException(Throwable t, MergePolicy.OneMerge merge) throws IOException {
if (infoStream != null) {
- message("handleMergeException: merge=" + merge.segString(directory) + " exc=" + t);
+ infoStream.message("IW", "handleMergeException: merge=" + merge.segString(directory) + " exc=" + t);
}
// Set the exception on the merge, so if
@@ -3317,7 +3264,7 @@ public class IndexWriter implements Clos
mergeInit(merge);
if (infoStream != null)
- message("now merge\n merge=" + merge.segString(directory) + "\n index=" + segString());
+ infoStream.message("IW", "now merge\n merge=" + merge.segString(directory) + "\n index=" + segString());
mergeMiddle(merge);
mergeSuccess(merge);
@@ -3331,7 +3278,7 @@ public class IndexWriter implements Clos
if (!success) {
if (infoStream != null)
- message("hit exception during merge");
+ infoStream.message("IW", "hit exception during merge");
if (merge.info != null && !segmentInfos.contains(merge.info))
deleter.refresh(merge.info.name);
}
@@ -3348,7 +3295,7 @@ public class IndexWriter implements Clos
handleOOM(oom, "merge");
}
if (infoStream != null && merge.info != null) {
- message("merge time " + (System.currentTimeMillis()-t0) + " msec for " + merge.info.docCount + " docs");
+ infoStream.message("IW", "merge time " + (System.currentTimeMillis()-t0) + " msec for " + merge.info.docCount + " docs");
}
//System.out.println(Thread.currentThread().getName() + ": merge end");
}
@@ -3395,7 +3342,7 @@ public class IndexWriter implements Clos
pendingMerges.add(merge);
if (infoStream != null)
- message("add merge to pendingMerges: " + merge.segString(directory) + " [total " + pendingMerges.size() + " pending]");
+ infoStream.message("IW", "add merge to pendingMerges: " + merge.segString(directory) + " [total " + pendingMerges.size() + " pending]");
merge.mergeGen = mergeGen;
merge.isExternal = isExternal;
@@ -3412,10 +3359,12 @@ public class IndexWriter implements Clos
builder.append("]");
// don't call mergingSegments.toString(): it could lead to ConcurrentModificationException
// since merge updates the segments FieldInfos
- message(builder.toString());
+ infoStream.message("IW", builder.toString());
}
for(SegmentInfo info : merge.segments) {
- message("registerMerge info=" + info);
+ if (infoStream != null) {
+ infoStream.message("IW", "registerMerge info=" + info);
+ }
mergingSegments.add(info);
}
@@ -3435,7 +3384,7 @@ public class IndexWriter implements Clos
} finally {
if (!success) {
if (infoStream != null) {
- message("hit exception in mergeInit");
+ infoStream.message("IW", "hit exception in mergeInit");
}
mergeFinish(merge);
}
@@ -3477,7 +3426,7 @@ public class IndexWriter implements Clos
if (!keepFullyDeletedSegments && result.allDeleted != null) {
if (infoStream != null) {
- message("drop 100% deleted segments: " + result.allDeleted);
+ infoStream.message("IW", "drop 100% deleted segments: " + result.allDeleted);
}
for(SegmentInfo info : result.allDeleted) {
segmentInfos.remove(info);
@@ -3502,7 +3451,7 @@ public class IndexWriter implements Clos
setDiagnostics(merge.info, "merge", details);
if (infoStream != null) {
- message("merge seg=" + merge.info.name);
+ infoStream.message("IW", "merge seg=" + merge.info.name);
}
assert merge.estimatedMergeBytes == 0;
@@ -3639,11 +3588,11 @@ public class IndexWriter implements Clos
IOContext context = new IOContext(merge.getMergeInfo());
- SegmentMerger merger = new SegmentMerger(directory, config.getTermIndexInterval(), mergedName, merge,
+ SegmentMerger merger = new SegmentMerger(infoStream, directory, config.getTermIndexInterval(), mergedName, merge,
payloadProcessorProvider, merge.info.getFieldInfos(), codec, context);
if (infoStream != null) {
- message("merging " + merge.segString(directory) + " mergeVectors=" + merge.info.getFieldInfos().hasVectors());
+ infoStream.message("IW", "merging " + merge.segString(directory) + " mergeVectors=" + merge.info.getFieldInfos().hasVectors());
}
merge.readers = new ArrayList<SegmentReader>();
@@ -3677,7 +3626,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null) {
- message("merge: total " + totDocCount + " docs");
+ infoStream.message("IW", "merge: total " + totDocCount + " docs");
}
merge.checkAborted(directory);
@@ -3689,8 +3638,8 @@ public class IndexWriter implements Clos
merge.info.setCodec(codec);
if (infoStream != null) {
- message("merge codec=" + codec);
- message("merge store matchedCount=" + merger.getMatchedSubReaderCount() + " vs " + merge.readers.size());
+ infoStream.message("IW", "merge codec=" + codec);
+ infoStream.message("IW", "merge store matchedCount=" + merger.getMatchedSubReaderCount() + " vs " + merge.readers.size());
}
anyNonBulkMerges |= merger.getAnyNonBulkMerges();
@@ -3711,7 +3660,7 @@ public class IndexWriter implements Clos
try {
if (infoStream != null) {
- message("create compound file " + compoundFileName);
+ infoStream.message("IW", "create compound file " + compoundFileName);
}
merger.createCompoundFile(compoundFileName, merge.info, new IOContext(merge.getMergeInfo()));
success = true;
@@ -3730,7 +3679,7 @@ public class IndexWriter implements Clos
} finally {
if (!success) {
if (infoStream != null) {
- message("hit exception creating compound file during merge");
+ infoStream.message("IW", "hit exception creating compound file during merge");
}
synchronized(this) {
@@ -3751,7 +3700,7 @@ public class IndexWriter implements Clos
if (merge.isAborted()) {
if (infoStream != null) {
- message("abort merge after building CFS");
+ infoStream.message("IW", "abort merge after building CFS");
}
deleter.deleteFile(compoundFileName);
return 0;
@@ -3762,7 +3711,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null) {
- message(String.format("merged segment size=%.3f MB vs estimate=%.3f MB", merge.info.sizeInBytes(true)/1024./1024., merge.estimatedMergeBytes/1024/1024.));
+ infoStream.message("IW", String.format("merged segment size=%.3f MB vs estimate=%.3f MB", merge.info.sizeInBytes(true)/1024./1024., merge.estimatedMergeBytes/1024/1024.));
}
final IndexReaderWarmer mergedSegmentWarmer = config.getMergedSegmentWarmer();
@@ -3936,7 +3885,7 @@ public class IndexWriter implements Clos
try {
if (infoStream != null) {
- message("startCommit(): start");
+ infoStream.message("IW", "startCommit(): start");
}
synchronized(this) {
@@ -3945,14 +3894,14 @@ public class IndexWriter implements Clos
if (pendingCommitChangeCount == lastCommitChangeCount) {
if (infoStream != null) {
- message(" skip startCommit(): no changes pending");
+ infoStream.message("IW", " skip startCommit(): no changes pending");
}
deleter.decRef(toSync);
return;
}
if (infoStream != null) {
- message("startCommit index=" + segString(toSync) + " changeCount=" + changeCount);
+ infoStream.message("IW", "startCommit index=" + segString(toSync) + " changeCount=" + changeCount);
}
assert filesExist(toSync);
@@ -3989,7 +3938,7 @@ public class IndexWriter implements Clos
}
if (infoStream != null) {
- message("done all syncs");
+ infoStream.message("IW", "done all syncs");
}
assert testPoint("midStartCommitSuccess");
@@ -4004,7 +3953,7 @@ public class IndexWriter implements Clos
if (!pendingCommitSet) {
if (infoStream != null) {
- message("hit exception committing segments file");
+ infoStream.message("IW", "hit exception committing segments file");
}
// Hit exception
@@ -4057,7 +4006,7 @@ public class IndexWriter implements Clos
private void handleOOM(OutOfMemoryError oom, String location) {
if (infoStream != null) {
- message("hit OutOfMemoryError inside " + location);
+ infoStream.message("IW", "hit OutOfMemoryError inside " + location);
}
hitOOM = true;
throw oom;
@@ -4082,7 +4031,7 @@ public class IndexWriter implements Clos
//System.out.println("IW.nrtIsCurrent " + (infos.version == segmentInfos.version && !docWriter.anyChanges() && !bufferedDeletesStream.any()));
ensureOpen();
if (infoStream != null) {
- message("nrtIsCurrent: infoVersion matches: " + (infos.version == segmentInfos.version) + " DW changes: " + docWriter.anyChanges() + " BD changes: "+bufferedDeletesStream.any());
+ infoStream.message("IW", "nrtIsCurrent: infoVersion matches: " + (infos.version == segmentInfos.version) + " DW changes: " + docWriter.anyChanges() + " BD changes: "+bufferedDeletesStream.any());
}
return infos.version == segmentInfos.version && !docWriter.anyChanges() && !bufferedDeletesStream.any();