You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2020/08/26 04:24:18 UTC
[lucenenet] branch master updated: PERFORMANCE:
Lucene.Net.Diagnostics: Added Assert overload for passing a string for
cases where no concatenation is used.
This is an automated email from the ASF dual-hosted git repository.
nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
The following commit(s) were added to refs/heads/master by this push:
new 5e4d299 PERFORMANCE: Lucene.Net.Diagnostics: Added Assert overload for passing a string for cases where no concatenation is used.
5e4d299 is described below
commit 5e4d2998ce697c1078fe7cb416982567bd9e1203
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Wed Aug 26 09:17:55 2020 +0700
PERFORMANCE: Lucene.Net.Diagnostics: Added Assert overload for passing a string for cases where no concatenation is used.
---
.../Analysis/CharFilter/HTMLStripCharFilter.cs | 2 +-
.../Analysis/Gl/GalicianStemmer.cs | 2 +-
.../Analysis/Miscellaneous/ASCIIFoldingFilter.cs | 2 +-
.../Analysis/Pt/PortugueseStemmer.cs | 2 +-
.../Icu/Segmentation/ICUTokenizerFactory.cs | 4 +-
src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs | 2 +-
.../Taxonomy/Directory/DirectoryTaxonomyWriter.cs | 4 +-
src/Lucene.Net.Replicator/ReplicationClient.cs | 2 +-
.../Prefix/AbstractVisitingPrefixTreeFilter.cs | 4 +-
.../Analysis/MockTokenizer.cs | 4 +-
.../Index/AssertingAtomicReader.cs | 58 +++++++++++-----------
src/Lucene.Net.TestFramework/Index/RandomCodec.cs | 4 +-
.../Util/TestRuleSetupAndRestoreClassEnv.cs | 12 ++---
.../Classic/TestQueryParser.cs | 4 +-
.../Flexible/Standard/TestStandardQP.cs | 4 +-
.../Suggest/LookupBenchmarkTest.cs | 4 +-
.../Analysis/TestGraphTokenizers.cs | 2 +-
src/Lucene.Net/Index/BufferedUpdatesStream.cs | 2 +-
src/Lucene.Net/Index/DocFieldProcessor.cs | 2 +-
src/Lucene.Net/Index/DocumentsWriter.cs | 6 +--
src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs | 4 +-
.../Index/DocumentsWriterFlushControl.cs | 10 ++--
src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs | 4 +-
src/Lucene.Net/Index/DocumentsWriterPerThread.cs | 6 +--
.../Index/DocumentsWriterPerThreadPool.cs | 4 +-
src/Lucene.Net/Index/FlushPolicy.cs | 2 +-
src/Lucene.Net/Index/FrozenBufferedUpdates.cs | 2 +-
src/Lucene.Net/Index/MultiFields.cs | 2 +-
src/Lucene.Net/Index/MultiTerms.cs | 2 +-
src/Lucene.Net/Search/MultiPhraseQuery.cs | 2 +-
src/Lucene.Net/Search/NumericRangeQuery.cs | 2 +-
src/Lucene.Net/Search/PhraseQuery.cs | 4 +-
src/Lucene.Net/Search/ReferenceManager.cs | 2 +-
src/Lucene.Net/Search/ScoringRewrite.cs | 2 +-
src/Lucene.Net/Search/TermQuery.cs | 2 +-
src/Lucene.Net/Search/TopTermsRewrite.cs | 6 +--
src/Lucene.Net/Store/CompoundFileWriter.cs | 2 +-
src/Lucene.Net/Store/IOContext.cs | 4 +-
src/Lucene.Net/Support/Diagnostics/Debugging.cs | 14 ++++++
src/Lucene.Net/Util/AttributeSource.cs | 2 +-
.../Util/Automaton/DaciukMihovAutomatonBuilder.cs | 6 +--
src/Lucene.Net/Util/BytesRefHash.cs | 12 ++---
src/Lucene.Net/Util/InfoStream.cs | 2 +-
src/Lucene.Net/Util/RamUsageEstimator.cs | 6 +--
44 files changed, 122 insertions(+), 108 deletions(-)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
index 5bff511..dfa3ba6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
@@ -30953,7 +30953,7 @@ namespace Lucene.Net.Analysis.CharFilters
/// </summary>
internal int NextChar()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!IsRead, () => "Attempting to read past the end of a segment.");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!IsRead, "Attempting to read past the end of a segment.");
return m_buf[pos++];
}
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs
index 5d41c25..e50e048 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.Gl
/// <returns> new valid length, stemmed </returns>
public virtual int Stem(char[] s, int len)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1");
+ if (Debugging.AssertsEnabled) Debugging.Assert(s.Length >= len + 1, "this stemmer requires an oversized array of at least 1");
len = plural.Apply(s, len);
len = unification.Apply(s, len);
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs
index f219184..70095ee 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs
@@ -92,7 +92,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
{
if (state != null)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(preserveOriginal, () => "state should only be captured if preserveOriginal is true");
+ if (Debugging.AssertsEnabled) Debugging.Assert(preserveOriginal, "state should only be captured if preserveOriginal is true");
RestoreState(state);
posIncAttr.PositionIncrement = 0;
state = null;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs
index 0d0a94c..0bfee1e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.Analysis.Pt
/// <returns> new valid length, stemmed </returns>
public virtual int Stem(char[] s, int len)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1");
+ if (Debugging.AssertsEnabled) Debugging.Assert(s.Length >= len + 1, "this stemmer requires an oversized array of at least 1");
len = plural.Apply(s, len);
len = adverb.Apply(s, len);
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
index fe38e72..f959065 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
@@ -105,7 +105,7 @@ namespace Lucene.Net.Analysis.Icu.Segmentation
public virtual void Inform(IResourceLoader loader)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(tailored != null, () => "init must be called first!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(tailored != null, "init must be called first!");
if (tailored.Count == 0)
{
config = new DefaultICUTokenizerConfig(cjkAsWords, myanmarAsWords);
@@ -165,7 +165,7 @@ namespace Lucene.Net.Analysis.Icu.Segmentation
public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(config != null, () => "inform must be called first!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(config != null, "inform must be called first!");
return new ICUTokenizer(factory, input, config);
}
}
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 7f3f6b2..7616e78 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -75,7 +75,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// </summary>
public CategoryPath(params string[] components)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(components.Length > 0, () => "use CategoryPath.EMPTY to create an empty path");
+ if (Debugging.AssertsEnabled) Debugging.Assert(components.Length > 0, "use CategoryPath.EMPTY to create an empty path");
foreach (string comp in components)
{
if (string.IsNullOrEmpty(comp))
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index 79da91f..3e9fb6f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -190,7 +190,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
// verify (to some extent) that merge policy in effect would preserve category docids
if (indexWriter != null)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!(indexWriter.Config.MergePolicy is TieredMergePolicy), () => "for preserving category docids, merging none-adjacent segments is not allowed");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!(indexWriter.Config.MergePolicy is TieredMergePolicy), "for preserving category docids, merging none-adjacent segments is not allowed");
}
// after we opened the writer, and the index is locked, it's safe to check
@@ -826,7 +826,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(t.Utf8ToString()));
docsEnum = termsEnum.Docs(null, docsEnum, DocsFlags.NONE);
bool res = cache.Put(cp, docsEnum.NextDoc() + ctx.DocBase);
- if (Debugging.AssertsEnabled) Debugging.Assert(!res, () => "entries should not have been evicted from the cache");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!res, "entries should not have been evicted from the cache");
}
else
{
diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs
index a9a7844..23b378c 100644
--- a/src/Lucene.Net.Replicator/ReplicationClient.cs
+++ b/src/Lucene.Net.Replicator/ReplicationClient.cs
@@ -416,7 +416,7 @@ namespace Lucene.Net.Replicator
updateThread = new ReplicationThread(intervalMillis, threadName, DoUpdate, HandleUpdateException, updateLock);
updateThread.Start();
// we rely on isAlive to return true in isUpdateThreadAlive, assert to be on the safe side
- if (Debugging.AssertsEnabled) Debugging.Assert(updateThread.IsAlive, () => "updateThread started but not alive?");
+ if (Debugging.AssertsEnabled) Debugging.Assert(updateThread.IsAlive, "updateThread started but not alive?");
}
/// <summary>
diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
index 9edae90..101fac3 100644
--- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
@@ -135,7 +135,7 @@ namespace Lucene.Net.Spatial.Prefix
public virtual DocIdSet GetDocIdSet()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(curVNode == null, () => "Called more than once?");
+ if (Debugging.AssertsEnabled) Debugging.Assert(curVNode == null, "Called more than once?");
if (m_termsEnum == null)
{
return null;
@@ -206,7 +206,7 @@ namespace Lucene.Net.Spatial.Prefix
if (compare > 0)
{
// leap frog (termsEnum is beyond where we would otherwise seek)
- if (Debugging.AssertsEnabled) Debugging.Assert(!m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), () => "should be absent");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), "should be absent");
}
else
{
diff --git a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs
index d6da069..43ad5b0 100644
--- a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs
@@ -300,7 +300,7 @@ namespace Lucene.Net.Analysis
state = runAutomaton.InitialState;
lastOffset = off = 0;
bufferedCodePoint = -1;
- if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState != State.RESET, () => "Double Reset()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState != State.RESET, "Double Reset()");
streamState = State.RESET;
}
@@ -333,7 +333,7 @@ namespace Lucene.Net.Analysis
// these tests should disable this check (in general you should consume the entire stream)
try
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState == State.INCREMENT_FALSE, () => "End() called before IncrementToken() returned false!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState == State.INCREMENT_FALSE, "End() called before IncrementToken() returned false!");
}
finally
{
diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
index f36ddb8..2d67f05 100644
--- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
@@ -116,7 +116,7 @@ namespace Lucene.Net.Index
public override int NextDoc()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "NextDoc() called after NO_MORE_DOCS");
int nextDoc = base.NextDoc();
if (Debugging.AssertsEnabled) Debugging.Assert(nextDoc > doc, () => "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input);
if (nextDoc == DocIdSetIterator.NO_MORE_DOCS)
@@ -133,7 +133,7 @@ namespace Lucene.Net.Index
public override int Advance(int target)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "Advance() called after NO_MORE_DOCS");
if (Debugging.AssertsEnabled) Debugging.Assert(target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
int advanced = base.Advance(target);
if (Debugging.AssertsEnabled) Debugging.Assert(advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
@@ -162,8 +162,8 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, "Freq called before NextDoc()/Advance()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "Freq called after NO_MORE_DOCS");
int freq = base.Freq;
if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0);
return freq;
@@ -409,7 +409,7 @@ namespace Lucene.Net.Index
public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "Docs(...) called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "Docs(...) called on unpositioned TermsEnum");
// TODO: should we give this thing a random to be super-evil,
// and randomly *not* unwrap?
@@ -423,7 +423,7 @@ namespace Lucene.Net.Index
public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "DocsAndPositions(...) called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "DocsAndPositions(...) called on unpositioned TermsEnum");
// TODO: should we give this thing a random to be super-evil,
// and randomly *not* unwrap?
@@ -439,7 +439,7 @@ namespace Lucene.Net.Index
// someone should not call next() after it returns null!!!!
public override BytesRef Next()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.INITIAL || state == State.POSITIONED, () => "Next() called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.INITIAL || state == State.POSITIONED, "Next() called on unpositioned TermsEnum");
BytesRef result = base.Next();
if (result == null)
{
@@ -457,7 +457,7 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "Ord called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "Ord called on unpositioned TermsEnum");
return base.Ord;
}
}
@@ -466,7 +466,7 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "DocFreq called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "DocFreq called on unpositioned TermsEnum");
return base.DocFreq;
}
}
@@ -475,7 +475,7 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "TotalTermFreq called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "TotalTermFreq called on unpositioned TermsEnum");
return base.TotalTermFreq;
}
}
@@ -484,7 +484,7 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "Term called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "Term called on unpositioned TermsEnum");
BytesRef ret = base.Term;
if (Debugging.AssertsEnabled) Debugging.Assert(ret == null || ret.IsValid());
return ret;
@@ -529,7 +529,7 @@ namespace Lucene.Net.Index
public override TermState GetTermState()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "GetTermState() called on unpositioned TermsEnum");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, "GetTermState() called on unpositioned TermsEnum");
return base.GetTermState();
}
@@ -562,7 +562,7 @@ namespace Lucene.Net.Index
public override int NextDoc()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "NextDoc() called after NO_MORE_DOCS");
int nextDoc = base.NextDoc();
if (Debugging.AssertsEnabled) Debugging.Assert(nextDoc > doc, () => "backwards nextDoc from " + doc + " to " + nextDoc);
positionCount = 0;
@@ -582,7 +582,7 @@ namespace Lucene.Net.Index
public override int Advance(int target)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "Advance() called after NO_MORE_DOCS");
if (Debugging.AssertsEnabled) Debugging.Assert(target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
int advanced = base.Advance(target);
if (Debugging.AssertsEnabled) Debugging.Assert(advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
@@ -614,8 +614,8 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, "Freq called before NextDoc()/Advance()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "Freq called after NO_MORE_DOCS");
int freq = base.Freq;
if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0);
return freq;
@@ -624,9 +624,9 @@ namespace Lucene.Net.Index
public override int NextPosition()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "NextPosition() called before NextDoc()/Advance()");
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "NextPosition() called after NO_MORE_DOCS");
- if (Debugging.AssertsEnabled) Debugging.Assert(positionCount < positionMax, () => "NextPosition() called more than Freq times!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, "NextPosition() called before NextDoc()/Advance()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "NextPosition() called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(positionCount < positionMax, "NextPosition() called more than Freq times!");
int position = base.NextPosition();
if (Debugging.AssertsEnabled) Debugging.Assert(position >= 0 || position == -1, () => "invalid position: " + position);
positionCount++;
@@ -637,9 +637,9 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "StartOffset called before NextDoc()/Advance()");
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "StartOffset called after NO_MORE_DOCS");
- if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, () => "StartOffset called before NextPosition()!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, "StartOffset called before NextDoc()/Advance()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "StartOffset called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, "StartOffset called before NextPosition()!");
return base.StartOffset;
}
}
@@ -648,20 +648,20 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "EndOffset called before NextDoc()/Advance()");
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "EndOffset called after NO_MORE_DOCS");
- if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, () => "EndOffset called before NextPosition()!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, "EndOffset called before NextDoc()/Advance()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "EndOffset called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, "EndOffset called before NextPosition()!");
return base.EndOffset;
}
}
public override BytesRef GetPayload()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "GetPayload() called before NextDoc()/Advance()");
- if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "GetPayload() called after NO_MORE_DOCS");
- if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, () => "GetPayload() called before NextPosition()!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, "GetPayload() called before NextDoc()/Advance()");
+ if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, "GetPayload() called after NO_MORE_DOCS");
+ if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, "GetPayload() called before NextPosition()!");
BytesRef payload = base.GetPayload();
- if (Debugging.AssertsEnabled) Debugging.Assert(payload == null || payload.IsValid() && payload.Length > 0, () => "GetPayload() returned payload with invalid length!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(payload == null || payload.IsValid() && payload.Length > 0, "GetPayload() returned payload with invalid length!");
return payload;
}
}
diff --git a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
index 74da42e..3634b23 100644
--- a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
+++ b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
@@ -92,7 +92,7 @@ namespace Lucene.Net.Index
}
previousMappings[name] = codec;
// Safety:
- if (Debugging.AssertsEnabled) Debugging.Assert(previousMappings.Count < 10000, () => "test went insane");
+ if (Debugging.AssertsEnabled) Debugging.Assert(previousMappings.Count < 10000, "test went insane");
}
//if (LuceneTestCase.VERBOSE)
@@ -115,7 +115,7 @@ namespace Lucene.Net.Index
}
previousDVMappings[name] = codec;
// Safety:
- if (Debugging.AssertsEnabled) Debugging.Assert(previousDVMappings.Count < 10000, () => "test went insane");
+ if (Debugging.AssertsEnabled) Debugging.Assert(previousDVMappings.Count < 10000, "test went insane");
}
//if (LuceneTestCase.VERBOSE)
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
index a0ca6d2..7e37025 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
@@ -169,7 +169,7 @@ namespace Lucene.Net.Util
!ShouldAvoidCodec("Lucene3x"))) // preflex-only setup
{
codec = Codec.ForName("Lucene3x");
- if (Debugging.AssertsEnabled) Debugging.Assert((codec is PreFlexRWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+ if (Debugging.AssertsEnabled) Debugging.Assert((codec is PreFlexRWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
LuceneTestCase.OldFormatImpersonationIsActive = true;
}
else if ("Lucene40".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
@@ -179,8 +179,8 @@ namespace Lucene.Net.Util
{
codec = Codec.ForName("Lucene40");
LuceneTestCase.OldFormatImpersonationIsActive = true;
- if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene40RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
- if (Debugging.AssertsEnabled) Debugging.Assert((PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), () => "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+ if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene40RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+ if (Debugging.AssertsEnabled) Debugging.Assert((PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
}
else if ("Lucene41".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
"random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -190,7 +190,7 @@ namespace Lucene.Net.Util
{
codec = Codec.ForName("Lucene41");
LuceneTestCase.OldFormatImpersonationIsActive = true;
- if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene41RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+ if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene41RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
}
else if ("Lucene42".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
"random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -200,7 +200,7 @@ namespace Lucene.Net.Util
{
codec = Codec.ForName("Lucene42");
LuceneTestCase.OldFormatImpersonationIsActive = true;
- if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene42RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+ if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene42RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
}
else if ("Lucene45".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
"random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -210,7 +210,7 @@ namespace Lucene.Net.Util
{
codec = Codec.ForName("Lucene45");
LuceneTestCase.OldFormatImpersonationIsActive = true;
- if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene45RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+ if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene45RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
}
else if (("random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) == false)
|| ("random".Equals(LuceneTestCase.TestDocValuesFormat, StringComparison.Ordinal) == false))
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index f8417b7..bf3e2db 100644
--- a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -65,8 +65,8 @@ namespace Lucene.Net.QueryParsers.Classic
public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(cqpC != null, () => "Parameter must not be null");
- if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser, () => "Parameter must be instance of QueryParser");
+ if (Debugging.AssertsEnabled) Debugging.Assert(cqpC != null, "Parameter must not be null");
+ if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser");
QueryParser qp = (QueryParser)cqpC;
return qp.Parse(query);
}
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
index 5cdeb98..a8d156b 100644
--- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
@@ -59,8 +59,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
public override Query GetQuery(String query, ICommonQueryParserConfiguration cqpC)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(cqpC != null, () => "Parameter must not be null");
- if (Debugging.AssertsEnabled) Debugging.Assert((cqpC is StandardQueryParser), () => "Parameter must be instance of StandardQueryParser");
+ if (Debugging.AssertsEnabled) Debugging.Assert(cqpC != null, "Parameter must not be null");
+ if (Debugging.AssertsEnabled) Debugging.Assert((cqpC is StandardQueryParser), "Parameter must be instance of StandardQueryParser");
StandardQueryParser qp = (StandardQueryParser)cqpC;
return Parse(query, qp);
}
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
index 883a110..b1c4460 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
@@ -75,7 +75,7 @@ namespace Lucene.Net.Search.Suggest
public override void SetUp()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "disable assertions before running benchmarks!");
+ if (Debugging.AssertsEnabled) Debugging.Assert(false, "disable assertions before running benchmarks!");
IList<Input> input = ReadTop50KWiki();
input.Shuffle(Random);
dictionaryInput = input.ToArray();
@@ -93,7 +93,7 @@ namespace Lucene.Net.Search.Suggest
List<Input> input = new List<Input>();
var resource = typeof(LookupBenchmarkTest).FindAndGetManifestResourceStream("Top50KWiki.utf8");
- if (Debugging.AssertsEnabled) Debugging.Assert(resource != null, () => "Resource missing: Top50KWiki.utf8");
+ if (Debugging.AssertsEnabled) Debugging.Assert(resource != null, "Resource missing: Top50KWiki.utf8");
string line = null;
using (TextReader br = new StreamReader(resource, UTF_8))
diff --git a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
index f351acb..60ed3d4 100644
--- a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
@@ -176,7 +176,7 @@ namespace Lucene.Net.Analysis
pos += minPosLength;
offset = 2 * pos;
}
- if (Debugging.AssertsEnabled) Debugging.Assert(maxPos <= pos, () => "input string mal-formed: posLength>1 tokens hang over the end");
+ if (Debugging.AssertsEnabled) Debugging.Assert(maxPos <= pos, "input string mal-formed: posLength>1 tokens hang over the end");
}
}
diff --git a/src/Lucene.Net/Index/BufferedUpdatesStream.cs b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
index f329ad2..4e32316 100644
--- a/src/Lucene.Net/Index/BufferedUpdatesStream.cs
+++ b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
@@ -93,7 +93,7 @@ namespace Lucene.Net.Index
Debugging.Assert(packet.Any());
Debugging.Assert(CheckDeleteStats());
Debugging.Assert(packet.DelGen < nextGen);
- Debugging.Assert(updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, () => "Delete packets must be in order");
+ Debugging.Assert(updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, "Delete packets must be in order");
}
updates.Add(packet);
numTerms.AddAndGet(packet.numTermDeletes);
diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs
index 343d005..ea2decd 100644
--- a/src/Lucene.Net/Index/DocFieldProcessor.cs
+++ b/src/Lucene.Net/Index/DocFieldProcessor.cs
@@ -246,7 +246,7 @@ namespace Lucene.Net.Index
// need to addOrUpdate so that FieldInfos can update globalFieldNumbers
// with the correct DocValue type (LUCENE-5192)
FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);
- if (Debugging.AssertsEnabled) Debugging.Assert(fi == fp.fieldInfo, () => "should only have updated an existing FieldInfo instance");
+ if (Debugging.AssertsEnabled) Debugging.Assert(fi == fp.fieldInfo, "should only have updated an existing FieldInfo instance");
}
if (thisFieldGen != fp.lastGen)
diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs
index b10606e..abe7274 100644
--- a/src/Lucene.Net/Index/DocumentsWriter.cs
+++ b/src/Lucene.Net/Index/DocumentsWriter.cs
@@ -244,7 +244,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!Monitor.IsEntered(writer), () => "IndexWriter lock should never be hold when aborting");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!Monitor.IsEntered(writer), "IndexWriter lock should never be held when aborting");
bool success = false;
JCG.HashSet<string> newFilesSet = new JCG.HashSet<string>();
try
@@ -498,7 +498,7 @@ namespace Lucene.Net.Index
if (!perThread.IsActive)
{
EnsureOpen();
- if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "perThread is not active but we are still open");
+ if (Debugging.AssertsEnabled) Debugging.Assert(false, "perThread is not active but we are still open");
}
EnsureInitialized(perThread);
if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsInitialized);
@@ -544,7 +544,7 @@ namespace Lucene.Net.Index
if (!perThread.IsActive)
{
EnsureOpen();
- if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "perThread is not active but we are still open");
+ if (Debugging.AssertsEnabled) Debugging.Assert(false, "perThread is not active but we are still open");
}
EnsureInitialized(perThread);
if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsInitialized);
diff --git a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs
index b262315..5be829a 100644
--- a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs
@@ -144,7 +144,7 @@ namespace Lucene.Net.Index
* competing updates wins!
*/
slice.sliceTail = termNode;
- if (Debugging.AssertsEnabled) Debugging.Assert(slice.sliceHead != slice.sliceTail, () => "slice head and tail must differ after add");
+ if (Debugging.AssertsEnabled) Debugging.Assert(slice.sliceHead != slice.sliceTail, "slice head and tail must differ after add");
TryApplyGlobalSlice(); // TODO doing this each time is not necessary maybe
// we can do it just every n times or so?
}
@@ -319,7 +319,7 @@ namespace Lucene.Net.Index
do
{
current = current.next;
- if (Debugging.AssertsEnabled) Debugging.Assert(current != null, () => "slice property violated between the head on the tail must not be a null node");
+ if (Debugging.AssertsEnabled) Debugging.Assert(current != null, "slice property violated: nodes between the head and the tail must not be null");
current.Apply(del, docIDUpto);
// System.out.println(Thread.currentThread().getName() + ": pull " + current + " docIDUpto=" + docIDUpto);
} while (current != sliceTail);
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
index faf35c9..346d834 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
@@ -428,7 +428,7 @@ namespace Lucene.Net.Index
long bytes = perThread.bytesUsed; // do that before
// replace!
dwpt = perThreadPool.Reset(perThread, closed);
- if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(dwpt), () => "DWPT is already flushing");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(dwpt), "DWPT is already flushing");
// Record the flushing DWPT to reduce flushBytes in doAfterFlush
flushingWriters[dwpt] = bytes;
numPending--; // write access synced
@@ -625,7 +625,7 @@ namespace Lucene.Net.Index
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(!fullFlush, () => "called DWFC#markForFullFlush() while full flush is still running");
+ Debugging.Assert(!fullFlush, "called DWFC#markForFullFlush() while full flush is still running");
Debugging.Assert(fullFlushBuffer.Count == 0, () => "full flush buffer should be empty: " + fullFlushBuffer);
}
fullFlush = true;
@@ -728,8 +728,8 @@ namespace Lucene.Net.Index
DocumentsWriterPerThread flushingDWPT = InternalTryCheckOutForFlush(perThread);
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(flushingDWPT != null, () => "DWPT must never be null here since we hold the lock and it holds documents");
- Debugging.Assert(dwpt == flushingDWPT, () => "flushControl returned different DWPT");
+ Debugging.Assert(flushingDWPT != null, "DWPT must never be null here since we hold the lock and it holds documents");
+ Debugging.Assert(dwpt == flushingDWPT, "flushControl returned different DWPT");
}
fullFlushBuffer.Add(flushingDWPT);
}
@@ -753,7 +753,7 @@ namespace Lucene.Net.Index
if (blockedFlush.Dwpt.deleteQueue == flushingQueue)
{
blockedFlushes.Remove(node);
- if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(blockedFlush.Dwpt), () => "DWPT is already flushing");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(blockedFlush.Dwpt), "DWPT is already flushing");
// Record the flushing DWPT to reduce flushBytes in doAfterFlush
flushingWriters[blockedFlush.Dwpt] = blockedFlush.Bytes;
// don't decr pending here - its already done when DWPT is blocked
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs
index c8ccede..11b8ee1 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs
@@ -295,7 +295,7 @@ namespace Lucene.Net.Index
protected internal override void Publish(IndexWriter writer)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!m_published, () => "ticket was already publised - can not publish twice");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!m_published, "ticket was already published - cannot publish twice");
m_published = true;
// its a global ticket - no segment to publish
FinishFlush(writer, null, m_frozenUpdates);
@@ -316,7 +316,7 @@ namespace Lucene.Net.Index
protected internal override void Publish(IndexWriter writer)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!m_published, () => "ticket was already publised - can not publish twice");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!m_published, "ticket was already published - cannot publish twice");
m_published = true;
FinishFlush(writer, segment, m_frozenUpdates);
}
diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
index 1329f85..3a5ca37 100644
--- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
@@ -400,7 +400,7 @@ namespace Lucene.Net.Index
if (delTerm != null)
{
deleteQueue.Add(delTerm, deleteSlice);
- if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item");
+ if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsTailItem(delTerm), "expected the delete term as the tail item");
deleteSlice.Apply(pendingUpdates, numDocsInRAM - docCount);
}
}
@@ -439,7 +439,7 @@ namespace Lucene.Net.Index
if (delTerm != null)
{
deleteQueue.Add(delTerm, deleteSlice);
- if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item");
+ if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsTailItem(delTerm), "expected the delete term as the tail item");
}
else
{
@@ -512,7 +512,7 @@ namespace Lucene.Net.Index
if (Debugging.AssertsEnabled)
{
Debugging.Assert(numDocsInRAM > 0);
- Debugging.Assert(deleteSlice.IsEmpty, () => "all deletes must be applied in prepareFlush");
+ Debugging.Assert(deleteSlice.IsEmpty, "all deletes must be applied in prepareFlush");
}
segmentInfo.DocCount = numDocsInRAM;
SegmentWriteState flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.Finish(), indexWriterConfig.TermIndexInterval, pendingUpdates, new IOContext(new FlushInfo(numDocsInRAM, BytesUsed)));
diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs
index 41394cc..1e356d8 100644
--- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs
@@ -253,10 +253,10 @@ namespace Lucene.Net.Index
{
for (int i = numThreadStatesActive; i < threadStates.Length; i++)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(threadStates[i].TryLock(), () => "unreleased threadstate should not be locked");
+ if (Debugging.AssertsEnabled) Debugging.Assert(threadStates[i].TryLock(), "unreleased threadstate should not be locked");
try
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!threadStates[i].IsInitialized, () => "expected unreleased thread state to be inactive");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!threadStates[i].IsInitialized, "expected unreleased thread state to be inactive");
}
finally
{
diff --git a/src/Lucene.Net/Index/FlushPolicy.cs b/src/Lucene.Net/Index/FlushPolicy.cs
index 2d2bff0..a7ad1e9 100644
--- a/src/Lucene.Net/Index/FlushPolicy.cs
+++ b/src/Lucene.Net/Index/FlushPolicy.cs
@@ -117,7 +117,7 @@ namespace Lucene.Net.Index
long maxRamSoFar = perThreadState.bytesUsed;
// the dwpt which needs to be flushed eventually
ThreadState maxRamUsingThreadState = perThreadState;
- if (Debugging.AssertsEnabled) Debugging.Assert(!perThreadState.flushPending, () => "DWPT should have flushed");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!perThreadState.flushPending, "DWPT should have flushed");
IEnumerator<ThreadState> activePerThreadsIterator = control.AllActiveThreadStates();
while (activePerThreadsIterator.MoveNext())
{
diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
index 25a8a35..4a7a810 100644
--- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
+++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
@@ -67,7 +67,7 @@ namespace Lucene.Net.Index
public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate)
{
this.isSegmentPrivate = isSegmentPrivate;
- if (Debugging.AssertsEnabled) Debugging.Assert(!isSegmentPrivate || deletes.terms.Count == 0, () => "segment private package should only have del queries");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!isSegmentPrivate || deletes.terms.Count == 0, "segment private package should only have del queries");
Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.terms.Count]*/);
termCount = termsArray.Length;
diff --git a/src/Lucene.Net/Index/MultiFields.cs b/src/Lucene.Net/Index/MultiFields.cs
index 923bb9f..bc27956 100644
--- a/src/Lucene.Net/Index/MultiFields.cs
+++ b/src/Lucene.Net/Index/MultiFields.cs
@@ -117,7 +117,7 @@ namespace Lucene.Net.Index
{
IList<AtomicReaderContext> leaves = reader.Leaves;
int size = leaves.Count;
- if (Debugging.AssertsEnabled) Debugging.Assert(size > 0, () => "A reader with deletions must have at least one leave");
+ if (Debugging.AssertsEnabled) Debugging.Assert(size > 0, "A reader with deletions must have at least one leaf");
if (size == 1)
{
return leaves[0].AtomicReader.LiveDocs;
diff --git a/src/Lucene.Net/Index/MultiTerms.cs b/src/Lucene.Net/Index/MultiTerms.cs
index 7ffec6f..da005b9 100644
--- a/src/Lucene.Net/Index/MultiTerms.cs
+++ b/src/Lucene.Net/Index/MultiTerms.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Index
this.subSlices = subSlices;
IComparer<BytesRef> _termComp = null;
- if (Debugging.AssertsEnabled) Debugging.Assert(subs.Length > 0, () => "inefficient: don't use MultiTerms over one sub");
+ if (Debugging.AssertsEnabled) Debugging.Assert(subs.Length > 0, "inefficient: don't use MultiTerms over one sub");
bool _hasFreqs = true;
bool _hasOffsets = true;
bool _hasPositions = true;
diff --git a/src/Lucene.Net/Search/MultiPhraseQuery.cs b/src/Lucene.Net/Search/MultiPhraseQuery.cs
index 3d09685..00c9abf 100644
--- a/src/Lucene.Net/Search/MultiPhraseQuery.cs
+++ b/src/Lucene.Net/Search/MultiPhraseQuery.cs
@@ -286,7 +286,7 @@ namespace Lucene.Net.Search
if (postingsEnum == null)
{
// term does exist, but has no positions
- if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, () => "termstate found but no term exists in reader");
+ if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, "termstate found but no term exists in reader");
throw new InvalidOperationException("field \"" + term.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.Text() + ")");
}
diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs
index cc58d36..05cbd5a 100644
--- a/src/Lucene.Net/Search/NumericRangeQuery.cs
+++ b/src/Lucene.Net/Search/NumericRangeQuery.cs
@@ -448,7 +448,7 @@ namespace Lucene.Net.Search
if (Debugging.AssertsEnabled) Debugging.Assert(rangeBounds.Count % 2 == 0);
currentLowerBound = rangeBounds.Dequeue();
- if (Debugging.AssertsEnabled) Debugging.Assert(currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, () => "The current upper bound must be <= the new lower bound");
+ if (Debugging.AssertsEnabled) Debugging.Assert(currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, "The current upper bound must be <= the new lower bound");
currentUpperBound = rangeBounds.Dequeue();
}
diff --git a/src/Lucene.Net/Search/PhraseQuery.cs b/src/Lucene.Net/Search/PhraseQuery.cs
index d5416e3..c36bb5d 100644
--- a/src/Lucene.Net/Search/PhraseQuery.cs
+++ b/src/Lucene.Net/Search/PhraseQuery.cs
@@ -357,7 +357,7 @@ namespace Lucene.Net.Search
TermState state = states[i].Get(context.Ord);
if (state == null) // term doesnt exist in this segment
{
- if (Debugging.AssertsEnabled) Debugging.Assert(TermNotInReader(reader, t), () => "no termstate found but term exists in reader");
+ if (Debugging.AssertsEnabled) Debugging.Assert(TermNotInReader(reader, t), "no termstate found but term exists in reader");
return null;
}
te.SeekExact(t.Bytes, state);
@@ -367,7 +367,7 @@ namespace Lucene.Net.Search
// positions.
if (postingsEnum == null)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(te.SeekExact(t.Bytes), () => "termstate found but no term exists in reader");
+ if (Debugging.AssertsEnabled) Debugging.Assert(te.SeekExact(t.Bytes), "termstate found but no term exists in reader");
// term does exist, but has no positions
throw new InvalidOperationException("field \"" + t.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + t.Text() + ")");
}
diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs
index f8e75c1..a6374e9 100644
--- a/src/Lucene.Net/Search/ReferenceManager.cs
+++ b/src/Lucene.Net/Search/ReferenceManager.cs
@@ -200,7 +200,7 @@ namespace Lucene.Net.Search
G newReference = RefreshIfNeeded(reference);
if (newReference != null)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed");
+ if (Debugging.AssertsEnabled) Debugging.Assert(!ReferenceEquals(newReference, reference), "refreshIfNeeded should return null if refresh wasn't needed");
try
{
SwapReference(newReference);
diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs
index 3651a12..08516f7 100644
--- a/src/Lucene.Net/Search/ScoringRewrite.cs
+++ b/src/Lucene.Net/Search/ScoringRewrite.cs
@@ -179,7 +179,7 @@ namespace Lucene.Net.Search
// duplicate term: update docFreq
int pos = (-e) - 1;
array.termState[pos].Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
- if (Debugging.AssertsEnabled) Debugging.Assert(array.boost[pos] == boostAtt.Boost, () => "boost should be equal in all segment TermsEnums");
+ if (Debugging.AssertsEnabled) Debugging.Assert(array.boost[pos] == boostAtt.Boost, "boost should be equal in all segment TermsEnums");
}
else
{
diff --git a/src/Lucene.Net/Search/TermQuery.cs b/src/Lucene.Net/Search/TermQuery.cs
index 773db4e..49e51c4 100644
--- a/src/Lucene.Net/Search/TermQuery.cs
+++ b/src/Lucene.Net/Search/TermQuery.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Search
public TermWeight(TermQuery outerInstance, IndexSearcher searcher, TermContext termStates)
{
this.outerInstance = outerInstance;
- if (Debugging.AssertsEnabled) Debugging.Assert(termStates != null, () => "TermContext must not be null");
+ if (Debugging.AssertsEnabled) Debugging.Assert(termStates != null, "TermContext must not be null");
this.termStates = termStates;
this.similarity = searcher.Similarity;
this.stats = similarity.ComputeWeight(outerInstance.Boost, searcher.CollectionStatistics(outerInstance.term.Field), searcher.TermStatistics(outerInstance.term, termStates));
diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs
index 3c07844..89397b4 100644
--- a/src/Lucene.Net/Search/TopTermsRewrite.cs
+++ b/src/Lucene.Net/Search/TopTermsRewrite.cs
@@ -178,7 +178,7 @@ namespace Lucene.Net.Search
if (visitedTerms.TryGetValue(bytes, out ScoreTerm t2))
{
// if the term is already in the PQ, only update docFreq of term in PQ
- if (Debugging.AssertsEnabled) Debugging.Assert(t2.Boost == boost, () => "boost should be equal in all segment TermsEnums");
+ if (Debugging.AssertsEnabled) Debugging.Assert(t2.Boost == boost, "boost should be equal in all segment TermsEnums");
t2.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
}
else
@@ -201,7 +201,7 @@ namespace Lucene.Net.Search
{
st = new ScoreTerm(termComp, new TermContext(m_topReaderContext));
}
- if (Debugging.AssertsEnabled) Debugging.Assert(stQueue.Count <= maxSize, () => "the PQ size must be limited to maxSize");
+ if (Debugging.AssertsEnabled) Debugging.Assert(stQueue.Count <= maxSize, "the PQ size must be limited to maxSize");
// set maxBoostAtt with values to help FuzzyTermsEnum to optimize
if (stQueue.Count == maxSize)
{
@@ -247,7 +247,7 @@ namespace Lucene.Net.Search
private static readonly IComparer<ScoreTerm> scoreTermSortByTermComp = Comparer<ScoreTerm>.Create((st1, st2) =>
{
- if (Debugging.AssertsEnabled) Debugging.Assert(st1.TermComp == st2.TermComp, () => "term comparer should not change between segments");
+ if (Debugging.AssertsEnabled) Debugging.Assert(st1.TermComp == st2.TermComp, "term comparer should not change between segments");
return st1.TermComp.Compare(st1.Bytes, st2.Bytes);
});
diff --git a/src/Lucene.Net/Store/CompoundFileWriter.cs b/src/Lucene.Net/Store/CompoundFileWriter.cs
index a3b7614..29f549a 100644
--- a/src/Lucene.Net/Store/CompoundFileWriter.cs
+++ b/src/Lucene.Net/Store/CompoundFileWriter.cs
@@ -253,7 +253,7 @@ namespace Lucene.Net.Store
bool outputLocked = false;
try
{
- if (Debugging.AssertsEnabled) Debugging.Assert(name != null, () => "name must not be null");
+ if (Debugging.AssertsEnabled) Debugging.Assert(name != null, "name must not be null");
if (entries.ContainsKey(name))
{
throw new ArgumentException("File " + name + " already exists");
diff --git a/src/Lucene.Net/Store/IOContext.cs b/src/Lucene.Net/Store/IOContext.cs
index 857ad60..07d6f72 100644
--- a/src/Lucene.Net/Store/IOContext.cs
+++ b/src/Lucene.Net/Store/IOContext.cs
@@ -96,8 +96,8 @@ namespace Lucene.Net.Store
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(context != UsageContext.MERGE || mergeInfo != null, () => "MergeInfo must not be null if context is MERGE");
- Debugging.Assert(context != UsageContext.FLUSH, () => "Use IOContext(FlushInfo) to create a FLUSH IOContext");
+ Debugging.Assert(context != UsageContext.MERGE || mergeInfo != null, "MergeInfo must not be null if context is MERGE");
+ Debugging.Assert(context != UsageContext.FLUSH, "Use IOContext(FlushInfo) to create a FLUSH IOContext");
}
this.Context = context;
this.ReadOnce = false;
diff --git a/src/Lucene.Net/Support/Diagnostics/Debugging.cs b/src/Lucene.Net/Support/Diagnostics/Debugging.cs
index d908420..f67667e 100644
--- a/src/Lucene.Net/Support/Diagnostics/Debugging.cs
+++ b/src/Lucene.Net/Support/Diagnostics/Debugging.cs
@@ -60,6 +60,20 @@ namespace Lucene.Net.Diagnostics
throw new AssertionException(messageFactory());
}
+ /// <summary>
+ /// Checks for a condition; if the <paramref name="condition"/> is <c>false</c>, throws an <see cref="AssertionException"/> with the given message.
+ /// <para/>
+ /// IMPORTANT: If you need to use string concatenation when building the message, use <see cref="Assert(bool, Func{string})"/> for better performance.
+ /// </summary>
+ /// <param name="condition">The conditional expression to evaluate. If the condition is <c>true</c>, no exception is thrown.</param>
+ /// <param name="message">The message to use.</param>
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void Assert(bool condition, string message)
+ {
+ if (AssertsEnabled && !condition)
+ throw new AssertionException(message);
+ }
+
///// <summary>
///// Checks for a condition; if the condition is <c>false</c>, throws an <see cref="AssertionException"/>.
///// </summary>
diff --git a/src/Lucene.Net/Util/AttributeSource.cs b/src/Lucene.Net/Util/AttributeSource.cs
index 0b5f922..70417a5 100644
--- a/src/Lucene.Net/Util/AttributeSource.cs
+++ b/src/Lucene.Net/Util/AttributeSource.cs
@@ -361,7 +361,7 @@ namespace Lucene.Net.Util
foreach (var curInterfaceRef in foundInterfaces)
{
curInterfaceRef.TryGetTarget(out Type curInterface);
- if (Debugging.AssertsEnabled) Debugging.Assert(curInterface != null, () => "We have a strong reference on the class holding the interfaces, so they should never get evicted");
+ if (Debugging.AssertsEnabled) Debugging.Assert(curInterface != null, "We have a strong reference on the class holding the interfaces, so they should never get evicted");
// Attribute is a superclass of this interface
if (!attributes.ContainsKey(curInterface))
{
diff --git a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs
index 90990fd..fbf2c33 100644
--- a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs
+++ b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs
@@ -145,7 +145,7 @@ namespace Lucene.Net.Util.Automaton
/// </summary>
internal State LastChild() // LUCENENET NOTE: Kept this a method because there is another overload
{
- if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, () => "No outgoing transitions.");
+ if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, "No outgoing transitions.");
return states[states.Length - 1];
}
@@ -171,7 +171,7 @@ namespace Lucene.Net.Util.Automaton
/// </summary>
internal void ReplaceLastChild(State state)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, () => "No outgoing transitions.");
+ if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, "No outgoing transitions.");
states[states.Length - 1] = state;
}
@@ -229,7 +229,7 @@ namespace Lucene.Net.Util.Automaton
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(stateRegistry != null, () => "Automaton already built.");
+ Debugging.Assert(stateRegistry != null, "Automaton already built.");
Debugging.Assert(previous == null || comparer.Compare(previous, current) <= 0, () => "Input must be in sorted UTF-8 order: " + previous + " >= " + current);
Debugging.Assert(SetPrevious(current));
}
diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs
index b7d0975..31d822c 100644
--- a/src/Lucene.Net/Util/BytesRefHash.cs
+++ b/src/Lucene.Net/Util/BytesRefHash.cs
@@ -123,7 +123,7 @@ namespace Lucene.Net.Util
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized");
+ Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
Debugging.Assert(bytesID < bytesStart.Length, () => "bytesID exceeds byteStart len: " + bytesStart.Length);
}
pool.SetBytesRef(@ref, bytesStart[bytesID]);
@@ -140,7 +140,7 @@ namespace Lucene.Net.Util
/// </summary>
public int[] Compact()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized");
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
int upto = 0;
for (int i = 0; i < hashSize; i++)
{
@@ -304,7 +304,7 @@ namespace Lucene.Net.Util
/// <see cref="ByteBlockPool.BYTE_BLOCK_SIZE"/> </exception>
public int Add(BytesRef bytes)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "Bytesstart is null - not initialized");
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
int length = bytes.Length;
// final position
int hashPos = FindHash(bytes);
@@ -380,7 +380,7 @@ namespace Lucene.Net.Util
private int FindHash(BytesRef bytes)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized");
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
int code = DoHash(bytes.Bytes, bytes.Offset, bytes.Length);
@@ -412,7 +412,7 @@ namespace Lucene.Net.Util
/// </summary>
public int AddByPoolOffset(int offset)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "Bytesstart is null - not initialized");
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
// final position
int code = offset;
int hashPos = offset & hashMask;
@@ -551,7 +551,7 @@ namespace Lucene.Net.Util
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized");
+ Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
Debugging.Assert(bytesID >= 0 && bytesID < count, () => bytesID.ToString());
}
return bytesStart[bytesID];
diff --git a/src/Lucene.Net/Util/InfoStream.cs b/src/Lucene.Net/Util/InfoStream.cs
index 3899476..db4d004 100644
--- a/src/Lucene.Net/Util/InfoStream.cs
+++ b/src/Lucene.Net/Util/InfoStream.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Util
{
public override void Message(string component, string message)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "message() should not be called when isEnabled returns false");
+ if (Debugging.AssertsEnabled) Debugging.Assert(false, "Message() should not be called when IsEnabled returns false");
}
public override bool IsEnabled(string component)
diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs
index b5e75e0..27dda7a 100644
--- a/src/Lucene.Net/Util/RamUsageEstimator.cs
+++ b/src/Lucene.Net/Util/RamUsageEstimator.cs
@@ -782,7 +782,7 @@ namespace Lucene.Net.Util
if (Debugging.AssertsEnabled)
{
Debugging.Assert(initialCapacity > 0, () => "Initial capacity must be between (0, " + int.MaxValue + "].");
- Debugging.Assert(loadFactor > 0 && loadFactor < 1, () => "Load factor must be between (0, 1).");
+ Debugging.Assert(loadFactor > 0 && loadFactor < 1, "Load factor must be between (0, 1).");
}
this.LoadFactor = loadFactor;
AllocateBuffers(RoundCapacity(initialCapacity));
@@ -793,7 +793,7 @@ namespace Lucene.Net.Util
/// </summary>
public bool Add(KType e)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(e != null, () => "Null keys not allowed.");
+ if (Debugging.AssertsEnabled) Debugging.Assert(e != null, "Null keys not allowed.");
if (Assigned >= resizeThreshold)
{
@@ -908,7 +908,7 @@ namespace Lucene.Net.Util
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(current > 0 && ((current & (current - 1)) == 0), () => "Capacity must be a power of two.");
+ Debugging.Assert(current > 0 && ((current & (current - 1)) == 0), "Capacity must be a power of two.");
Debugging.Assert((current << 1) > 0, () => "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ").");
}