You are viewing a plain text version of this content. The canonical link for it is available in the original HTML version of this page.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2020/11/03 21:24:21 UTC
[lucenenet] 09/11: SWEEP: Fixed Debugging.Assert() calls that
format strings with parameters so the parameters are not resolved until a
condition fails. There are still some calls that do light math and pick
items from arrays,
but this performance hit in the tests is something we can live with for
better production performance. Closes #346, closes #373, closes #372.
This is an automated email from the ASF dual-hosted git repository.
nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
commit ca2ecb225f9dfbf4d5a2251ce4763429b08b273a
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Tue Nov 3 10:49:50 2020 +0700
SWEEP: Fixed Debugging.Assert() calls that format strings with parameters so the parameters are not resolved until a condition fails. There are still some calls that do light math and pick items from arrays, but this performance hit in the tests is something we can live with for better production performance. Closes #346, closes #373, closes #372.
---
.../Analysis/CharFilter/BaseCharFilter.cs | 2 +-
.../Analysis/CharFilter/HTMLStripCharFilter.cs | 4 +-
.../Analysis/Util/RollingCharBuffer.cs | 11 ++++--
.../SimpleText/SimpleTextDocValuesReader.cs | 16 +++++---
.../SimpleText/SimpleTextFieldsReader.cs | 10 +++--
.../Suggest/Analyzing/FSTUtil.cs | 2 +-
.../Analysis/LookaheadTokenFilter.cs | 4 +-
.../Analysis/MockTokenizer.cs | 6 +--
.../Codecs/Lucene3x/PreFlexRWNormsConsumer.cs | 2 +-
.../Codecs/Lucene3x/TermInfosWriter.cs | 15 +++++--
.../Codecs/Lucene40/Lucene40PostingsWriter.cs | 2 +-
.../Index/AssertingAtomicReader.cs | 4 +-
.../Search/ShardSearchingTestBase.cs | 4 +-
.../Store/MockDirectoryWrapper.cs | 2 +-
.../Util/Automaton/AutomatonTestUtil.cs | 2 +-
.../Analysis/CharFilters/TestMappingCharFilter.cs | 2 +-
.../Util/Automaton/TestUTF32ToUTF8.cs | 2 +-
src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 13 +++---
src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs | 19 +++++----
.../Compressing/CompressingStoredFieldsReader.cs | 2 +-
.../Compressing/CompressingTermVectorsWriter.cs | 2 +-
.../Codecs/Compressing/CompressionMode.cs | 2 +-
src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs | 5 ++-
.../Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs | 4 +-
.../Codecs/Lucene3x/Lucene3xTermVectorsReader.cs | 2 +-
.../Codecs/Lucene40/Lucene40LiveDocsFormat.cs | 2 +-
.../Codecs/Lucene40/Lucene40PostingsReader.cs | 4 +-
.../Codecs/Lucene40/Lucene40StoredFieldsReader.cs | 2 +-
.../Codecs/Lucene40/Lucene40TermVectorsWriter.cs | 2 +-
src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs | 2 +-
.../Codecs/PerField/PerFieldDocValuesFormat.cs | 2 +-
src/Lucene.Net/Codecs/TermVectorsWriter.cs | 2 +-
src/Lucene.Net/Index/BufferedUpdatesStream.cs | 2 +-
src/Lucene.Net/Index/DocumentsWriter.cs | 4 +-
.../Index/DocumentsWriterFlushControl.cs | 15 +++++--
.../Index/FreqProxTermsWriterPerField.cs | 4 +-
src/Lucene.Net/Index/IndexFileDeleter.cs | 9 +++--
src/Lucene.Net/Index/IndexWriter.cs | 46 +++++++++++++++-------
src/Lucene.Net/Index/MultiBits.cs | 2 +-
src/Lucene.Net/Index/ReadersAndUpdates.cs | 4 +-
src/Lucene.Net/Search/IndexSearcher.cs | 2 +-
src/Lucene.Net/Search/Spans/NearSpansOrdered.cs | 2 +-
src/Lucene.Net/Search/TermQuery.cs | 2 +-
src/Lucene.Net/Search/TopTermsRewrite.cs | 2 +-
src/Lucene.Net/Store/BufferedIndexInput.cs | 2 +-
src/Lucene.Net/Util/Automaton/BasicOperations.cs | 2 +-
src/Lucene.Net/Util/BroadWord.cs | 2 +-
src/Lucene.Net/Util/BytesRefHash.cs | 2 +-
src/Lucene.Net/Util/FixedBitSet.cs | 22 +++++------
src/Lucene.Net/Util/Fst/Builder.cs | 4 +-
src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs | 2 +-
src/Lucene.Net/Util/Fst/BytesStore.cs | 4 +-
src/Lucene.Net/Util/Fst/FST.cs | 2 +-
src/Lucene.Net/Util/Fst/FSTEnum.cs | 4 +-
src/Lucene.Net/Util/Fst/NoOutputs.cs | 2 +-
src/Lucene.Net/Util/Fst/NodeHash.cs | 7 +++-
src/Lucene.Net/Util/LongBitSet.cs | 10 ++---
src/Lucene.Net/Util/OfflineSorter.cs | 2 +-
src/Lucene.Net/Util/PForDeltaDocIdSet.cs | 2 +-
src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs | 2 +-
src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs | 2 +-
src/Lucene.Net/Util/Packed/Packed64.cs | 43 +-------------------
src/Lucene.Net/Util/Packed/PackedDataInput.cs | 2 +-
src/Lucene.Net/Util/Packed/PackedInts.cs | 16 ++++----
src/Lucene.Net/Util/Packed/PackedWriter.cs | 2 +-
src/Lucene.Net/Util/PagedBytes.cs | 2 +-
src/Lucene.Net/Util/RamUsageEstimator.cs | 2 +-
src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs | 2 +-
src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs | 2 +-
src/Lucene.Net/Util/RollingBuffer.cs | 2 +-
src/Lucene.Net/Util/UnicodeUtil.cs | 2 +-
71 files changed, 208 insertions(+), 189 deletions(-)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
index d8c54c0..097ba8c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
@@ -114,7 +114,7 @@ namespace Lucene.Net.Analysis.CharFilters
}
int offset = offsets[(size == 0) ? 0 : size - 1];
- if (Debugging.AssertsEnabled) Debugging.Assert(size == 0 || off >= offset, "Offset #{0}({1}) is less than the last recorded offset {2}\n{3}\n{4}", size, off, offset, Arrays.ToString(offsets), Arrays.ToString(diffs));
+ if (Debugging.AssertsEnabled) Debugging.Assert(size == 0 || off >= offset, "Offset #{0}({1}) is less than the last recorded offset {2}\n{3}\n{4}", size, off, offset, offsets, diffs);
if (size == 0 || off != offsets[size - 1])
{
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
index b8a7849..7e4af70 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
@@ -31636,9 +31636,9 @@ namespace Lucene.Net.Analysis.CharFilters
{
codePoint = int.Parse(hexCharRef, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
}
- catch (Exception /*e*/)
+ catch (Exception e)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(false, "Exception parsing hex code point '{0}'");
+ if (Debugging.AssertsEnabled) Debugging.Assert(false, "Exception parsing hex code point '{0}'", e);
}
if (codePoint <= 0x10FFFF)
{
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs
index 71340c9..103edce 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs
@@ -107,11 +107,14 @@ namespace Lucene.Net.Analysis.Util
}
else
{
- // Cannot read from future (except by 1):
- if (Debugging.AssertsEnabled) Debugging.Assert(pos < nextPos);
+ if (Debugging.AssertsEnabled)
+ {
+ // Cannot read from future (except by 1):
+ Debugging.Assert(pos < nextPos);
- // Cannot read from already freed past:
- if (Debugging.AssertsEnabled) Debugging.Assert(nextPos - pos <= count, "nextPos={0} pos={1} count={2}", nextPos, pos, count);
+ // Cannot read from already freed past:
+ Debugging.Assert(nextPos - pos <= count, "nextPos={0} pos={1} count={2}", nextPos, pos, count);
+ }
return buffer[GetIndex(pos)];
}
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
index c8407d2..aa70804 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
@@ -1,6 +1,7 @@
using J2N.Text;
using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
+using Lucene.Net.Util;
using System;
using System.Collections.Generic;
using System.Diagnostics;
@@ -76,21 +77,24 @@ namespace Lucene.Net.Codecs.SimpleText
{
break;
}
- if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.FIELD), scratch.Utf8ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.FIELD), "{0}", new BytesRefFormatter(scratch, BytesRefFormat.UTF8));
var fieldName = StripPrefix(SimpleTextDocValuesWriter.FIELD);
var field = new OneField();
fields[fieldName] = field;
ReadLine();
- if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.TYPE), scratch.Utf8ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.TYPE), "{0}", new BytesRefFormatter(scratch, BytesRefFormat.UTF8));
var dvType = (DocValuesType)Enum.Parse(typeof(DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE));
// if (Debugging.AssertsEnabled) Debugging.Assert(dvType != null); // LUCENENET: Not possible for an enum to be null in .NET
if (dvType == DocValuesType.NUMERIC)
{
ReadLine();
- if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.MINVALUE), "got {0} field={1} ext={2}", scratch.Utf8ToString(), fieldName, ext);
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.MINVALUE), "got {0} field={1} ext={2}", new BytesRefFormatter(scratch, BytesRefFormat.UTF8), fieldName, ext);
field.MinValue = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.MINVALUE), CultureInfo.InvariantCulture);
ReadLine();
if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN));
@@ -435,7 +439,8 @@ namespace Lucene.Net.Codecs.SimpleText
}
_input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength));
SimpleTextUtil.ReadLine(_input, _scratch);
- if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), "got {0} in={1}", _scratch.Utf8ToString(), _input.ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), "got {0} in={1}", new BytesRefFormatter(_scratch, BytesRefFormat.UTF8), _input);
int len;
try
{
@@ -539,7 +544,8 @@ namespace Lucene.Net.Codecs.SimpleText
_input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength));
SimpleTextUtil.ReadLine(_input, _scratch);
- if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), "got {0} in={1}", _scratch.Utf8ToString(), _input.ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), "got {0} in={1}", new BytesRefFormatter(_scratch, BytesRefFormat.UTF8), _input);
int len;
try
{
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
index 015e766..6bb03ca 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
@@ -1,5 +1,6 @@
using Lucene.Net.Diagnostics;
using Lucene.Net.Index;
+using Lucene.Net.Util;
using Lucene.Net.Util.Fst;
using System;
using System.Collections.Generic;
@@ -477,7 +478,8 @@ namespace Lucene.Net.Codecs.SimpleText
if (_readPositions)
{
SimpleTextUtil.ReadLine(_in, _scratch);
- if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), "got line={0}", _scratch.Utf8ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), "got line={0}", new BytesRefFormatter(_scratch, BytesRefFormat.UTF8));
UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.POS.Length, _scratch.Length - SimpleTextFieldsWriter.POS.Length,
_scratchUtf162);
pos = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length);
@@ -490,12 +492,14 @@ namespace Lucene.Net.Codecs.SimpleText
if (_readOffsets)
{
SimpleTextUtil.ReadLine(_in, _scratch);
- if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), "got line={0}", _scratch.Utf8ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), "got line={0}", new BytesRefFormatter(_scratch, BytesRefFormat.UTF8));
UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.START_OFFSET.Length,
_scratch.Length - SimpleTextFieldsWriter.START_OFFSET.Length, _scratchUtf162);
_startOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length);
SimpleTextUtil.ReadLine(_in, _scratch);
- if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), "got line={0}", _scratch.Utf8ToString());
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), "got line={0}", new BytesRefFormatter(_scratch, BytesRefFormat.UTF8));
UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.END_OFFSET.Length,
_scratch.Length - SimpleTextFieldsWriter.END_OFFSET.Length, _scratchUtf162);
_endOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length);
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs
index d9f8bd7..5ad3fde 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs
@@ -130,7 +130,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
.CopyFrom(nextArc), fst.Outputs.Add(path.Output, nextArc.Output), newInput));
int label = nextArc.Label; // used in assert
nextArc = nextArc.IsLast ? null : fst.ReadNextRealArc(nextArc, fstReader);
- if (Debugging.AssertsEnabled) Debugging.Assert(nextArc == null || label < nextArc.Label, "last: {0} next: {1}", label, (nextArc == null ? "" : nextArc.Label.ToString()));
+ if (Debugging.AssertsEnabled) Debugging.Assert(nextArc == null || label < nextArc.Label, "last: {0} next: {1}", label, nextArc?.Label);
}
}
}
diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
index 85c4db1..54afdf3 100644
--- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
@@ -216,7 +216,7 @@ namespace Lucene.Net.Analysis
else
{
// Make sure our input isn't messing up offsets:
- if (Debugging.AssertsEnabled) Debugging.Assert(startPosData.StartOffset == startOffset,"prev startOffset={0} vs new startOffset={1}", startPosData.StartOffset, startOffset + " inputPos=" + m_inputPos);
+ if (Debugging.AssertsEnabled) Debugging.Assert(startPosData.StartOffset == startOffset, "prev startOffset={0} vs new startOffset={1} inputPos={2}", startPosData.StartOffset, startOffset, m_inputPos);
}
int endOffset = m_offsetAtt.EndOffset;
@@ -227,7 +227,7 @@ namespace Lucene.Net.Analysis
else
{
// Make sure our input isn't messing up offsets:
- if (Debugging.AssertsEnabled) Debugging.Assert(endPosData.EndOffset == endOffset,"prev endOffset={0} vs new endOffset={1}", endPosData.EndOffset, endOffset + " inputPos=" + m_inputPos);
+ if (Debugging.AssertsEnabled) Debugging.Assert(endPosData.EndOffset == endOffset, "prev endOffset={0} vs new endOffset={1} inputPos={2}", endPosData.EndOffset, endOffset, m_inputPos);
}
tokenPending = true;
diff --git a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs
index 7a9cb2c..278b3df 100644
--- a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs
@@ -219,7 +219,7 @@ namespace Lucene.Net.Analysis
}
else
{
- if (Debugging.AssertsEnabled) Debugging.Assert(!char.IsLowSurrogate((char)ch),"unpaired low surrogate: {0}", ch.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(!char.IsLowSurrogate((char)ch),"unpaired low surrogate: {0:x}", ch);
off++;
if (char.IsHighSurrogate((char)ch))
{
@@ -227,12 +227,12 @@ namespace Lucene.Net.Analysis
if (ch2 >= 0)
{
off++;
- if (Debugging.AssertsEnabled) Debugging.Assert(char.IsLowSurrogate((char)ch2),"unpaired high surrogate: {0}, followed by: {1}", ch.ToString("x"), ch2.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(char.IsLowSurrogate((char)ch2),"unpaired high surrogate: {0:x}, followed by: {1:x}", ch, ch2);
return Character.ToCodePoint((char)ch, (char)ch2);
}
else
{
- if (Debugging.AssertsEnabled) Debugging.Assert(false,"stream ends with unpaired high surrogate: {0}", ch.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(false,"stream ends with unpaired high surrogate: {0:x}", ch);
}
}
return ch;
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
index 9459657..293d408 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
@@ -75,7 +75,7 @@ namespace Lucene.Net.Codecs.Lucene3x
public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(field.Number > lastFieldNumber,"writing norms fields out of order{0} -> {1}", lastFieldNumber, field.Number);
+ if (Debugging.AssertsEnabled) Debugging.Assert(field.Number > lastFieldNumber,"writing norms fields out of order {0} -> {1}", lastFieldNumber, field.Number);
foreach (var n in values)
{
if (((sbyte)(byte)(long)n) < sbyte.MinValue || ((sbyte)(byte)(long)n) > sbyte.MaxValue)
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
index ccf6084..3904ee9 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
@@ -243,10 +243,17 @@ namespace Lucene.Net.Codecs.Lucene3x
/// </summary>
public void Add(int fieldNumber, BytesRef term, TermInfo ti)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), "Terms are out of order: field={0} (number {1}) lastField={2} (number {3}) text={4} lastText={5}", FieldName(fieldInfos, fieldNumber), fieldNumber, FieldName(fieldInfos, lastFieldNumber), lastFieldNumber, term.Utf8ToString(), lastTerm.Utf8ToString());
-
- if (Debugging.AssertsEnabled) Debugging.Assert(ti.FreqPointer >= lastTi.FreqPointer,"freqPointer out of order ({0} < {1})", ti.FreqPointer, lastTi.FreqPointer);
- if (Debugging.AssertsEnabled) Debugging.Assert(ti.ProxPointer >= lastTi.ProxPointer,"proxPointer out of order ({0} < {1})", ti.ProxPointer, lastTi.ProxPointer);
+ if (Debugging.AssertsEnabled)
+ {
+ Debugging.Assert(CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0),
+ "Terms are out of order: field={0} (number {1}) lastField={2} (number {3}) text={4} lastText={5}",
+ FieldName(fieldInfos, fieldNumber), fieldNumber, FieldName(fieldInfos, lastFieldNumber), lastFieldNumber,
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ new BytesRefFormatter(term, BytesRefFormat.UTF8), new BytesRefFormatter(lastTerm, BytesRefFormat.UTF8));
+
+ Debugging.Assert(ti.FreqPointer >= lastTi.FreqPointer, "freqPointer out of order ({0} < {1})", ti.FreqPointer, lastTi.FreqPointer);
+ Debugging.Assert(ti.ProxPointer >= lastTi.ProxPointer, "proxPointer out of order ({0} < {1})", ti.ProxPointer, lastTi.ProxPointer);
+ }
if (!isIndex && size % indexInterval == 0)
{
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
index 86364a5..4f1853e 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
@@ -271,7 +271,7 @@ namespace Lucene.Net.Codecs.Lucene40
// and the numbers aren't that much smaller anyways.
int offsetDelta = startOffset - lastOffset;
int offsetLength = endOffset - startOffset;
- if (Debugging.AssertsEnabled) Debugging.Assert(offsetDelta >= 0 && offsetLength >= 0,"startOffset={0},lastOffset={1}", startOffset, lastOffset + ",endOffset=" + endOffset);
+ if (Debugging.AssertsEnabled) Debugging.Assert(offsetDelta >= 0 && offsetLength >= 0, "startOffset={0},lastOffset={1},endOffset={2}", startOffset, lastOffset, endOffset);
if (offsetLength != lastOffsetLength)
{
proxOut.WriteVInt32(offsetDelta << 1 | 1);
diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
index 4259150..ef3c24a 100644
--- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
@@ -160,7 +160,7 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(doc == base.DocID," invalid DocID in {0} {1}", m_input.GetType(), base.DocID + " instead of " + doc);
+ if (Debugging.AssertsEnabled) Debugging.Assert(doc == base.DocID, " invalid DocID in {0} {1} instead of {2}", m_input.GetType(), base.DocID, doc);
return doc;
}
}
@@ -621,7 +621,7 @@ namespace Lucene.Net.Index
{
get
{
- if (Debugging.AssertsEnabled) Debugging.Assert(doc == base.DocID," invalid DocID in {0} {1}", m_input.GetType(), base.DocID + " instead of " + doc);
+ if (Debugging.AssertsEnabled) Debugging.Assert(doc == base.DocID, " invalid DocID in {0} {1} instead of {2}", m_input.GetType(), base.DocID, doc);
return doc;
}
}
diff --git a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs
index 90ad597..48950c0 100644
--- a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs
+++ b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs
@@ -306,7 +306,7 @@ namespace Lucene.Net.Search
this.outerInstance = nodeState;
this.nodeVersions = nodeVersions;
MyNodeID = nodeID;
- if (Debugging.AssertsEnabled) Debugging.Assert(MyNodeID == nodeState.MyNodeID,"myNodeID={0} NodeState.this.myNodeID={1}", nodeID, nodeState.MyNodeID);
+ if (Debugging.AssertsEnabled) Debugging.Assert(MyNodeID == nodeState.MyNodeID,"myNodeID={0} nodeState.MyNodeID={1}", nodeID, nodeState.MyNodeID);
}
public override Query Rewrite(Query original)
@@ -419,7 +419,7 @@ namespace Lucene.Net.Search
}
// Collection stats are pre-shared on reopen, so,
// we better not have a cache miss:
- if (Debugging.AssertsEnabled) Debugging.Assert(nodeStats != null,"myNodeID={0} nodeID={1}", MyNodeID, nodeID + " version=" + nodeVersions[nodeID] + " field=" + field);
+ if (Debugging.AssertsEnabled) Debugging.Assert(nodeStats != null, "myNodeID={0} nodeID={1} version={2} field={3}", MyNodeID, nodeID, nodeVersions[nodeID], field);
long nodeDocCount = nodeStats.DocCount;
if (docCount >= 0 && nodeDocCount >= 0)
diff --git a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
index 3b3ac94..477cdbf 100644
--- a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
+++ b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
@@ -1024,7 +1024,7 @@ namespace Lucene.Net.Store
extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions;
}
- if (Debugging.AssertsEnabled) Debugging.Assert(false,"unreferenced files: before delete:\n {0}\n after delete:\n {1}", Arrays.ToString(startFiles), Arrays.ToString(endFiles) + extras);
+ if (Debugging.AssertsEnabled) Debugging.Assert(false, "unreferenced files: before delete:\n {0}\n after delete:\n {1}{2}", startFiles, endFiles, extras);
}
DirectoryReader ir1 = DirectoryReader.Open(this);
diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
index 34b2b8f..6fc769d 100644
--- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
+++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
@@ -399,7 +399,7 @@ namespace Lucene.Net.Util.Automaton
{
int numStates = a.GetNumberOfStates();
a.ClearNumberedStates(); // force recomputation of cached numbered states
- if (Debugging.AssertsEnabled) Debugging.Assert(numStates == a.GetNumberOfStates(),"automaton has {0}", (numStates - a.GetNumberOfStates()) + " detached states");
+ if (Debugging.AssertsEnabled) Debugging.Assert(numStates == a.GetNumberOfStates(), "automaton has {0} detached states", numStates - a.GetNumberOfStates());
}
}
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
index ec98692..91967c1 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
@@ -422,7 +422,7 @@ namespace Lucene.Net.Analysis.CharFilters
// Same length: no change to offset
}
- if (Debugging.AssertsEnabled) Debugging.Assert(inputOffsets.Count == output.Length,"inputOffsets.size()={0} vs output.length()={1}", inputOffsets.Count, output.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(inputOffsets.Count == output.Length,"inputOffsets.Count={0} vs output.Length={1}", inputOffsets.Count, output.Length);
}
else
{
diff --git a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs
index c9fcb46..8a67109 100644
--- a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs
+++ b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs
@@ -95,7 +95,7 @@ namespace Lucene.Net.Util.Automaton
}
}
- if (Debugging.AssertsEnabled) Debugging.Assert(code >= startCode && code <= endCode,"code={0} start={1}", code, startCode + " end=" + endCode);
+ if (Debugging.AssertsEnabled) Debugging.Assert(code >= startCode && code <= endCode, "code={0} start={1} end={2}", code, startCode, endCode);
if (Debugging.AssertsEnabled) Debugging.Assert(!IsSurrogate(code));
Assert.IsTrue(Matches(a, code), "DFA for range " + startCode + "-" + endCode + " failed to match code=" + code);
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
index 210cfa3..59e1209 100644
--- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
+++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
@@ -1,6 +1,7 @@
using Lucene.Net.Diagnostics;
using Lucene.Net.Index;
using Lucene.Net.Support;
+using Lucene.Net.Util;
using Lucene.Net.Util.Fst;
using System;
using System.Collections.Generic;
@@ -1430,7 +1431,9 @@ namespace Lucene.Net.Codecs
{
CopyTerm();
//if (DEBUG) System.out.println(" term match to state=" + state + "; return term=" + brToString(term));
- if (Debugging.AssertsEnabled) Debugging.Assert(savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, "saveStartTerm={0} term={1}", savedStartTerm.Utf8ToString(), term.Utf8ToString());
+ if (Debugging.AssertsEnabled) Debugging.Assert(savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, "saveStartTerm={0} term={1}",
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ new BytesRefFormatter(savedStartTerm, BytesRefFormat.UTF8), new BytesRefFormatter(term, BytesRefFormat.UTF8));
return true;
}
else
@@ -2388,7 +2391,7 @@ namespace Lucene.Net.Codecs
// }
// if (OuterInstance.Index != null)
// {
- // if (Debugging.AssertsEnabled) Debugging.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc);
+ // if (Debugging.AssertsEnabled) Debugging.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame={0} f.arc={1}", isSeekFrame, f.Arc);
// if (f.Prefix > 0 && isSeekFrame && f.Arc.Label != (term.Bytes[f.Prefix - 1] & 0xFF))
// {
// @out.println(" broken seek state: arc.label=" + (char)f.Arc.Label + " vs term byte=" + (char)(term.Bytes[f.Prefix - 1] & 0xFF));
@@ -2760,7 +2763,7 @@ namespace Lucene.Net.Codecs
//if (DEBUG) {
//System.out.println(" loadNextFloorBlock fp=" + fp + " fpEnd=" + fpEnd);
//}
- if (Debugging.AssertsEnabled) Debugging.Assert(arc == null || isFloor,"arc={0} isFloor={1}", arc, isFloor);
+ if (Debugging.AssertsEnabled) Debugging.Assert(arc == null || isFloor, "arc={0} isFloor={1}", arc, isFloor);
fp = fpEnd;
nextEnt = -1;
LoadBlock();
@@ -2917,7 +2920,7 @@ namespace Lucene.Net.Codecs
public bool NextLeaf()
{
//if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount);
- if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount,"nextEnt={0} entCount={1}", nextEnt, entCount + " fp=" + fp);
+ if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt={0} entCount={1} fp={2}", nextEnt, entCount, fp);
nextEnt++;
suffix = suffixesReader.ReadVInt32();
startBytePos = suffixesReader.Position;
@@ -2935,7 +2938,7 @@ namespace Lucene.Net.Codecs
public bool NextNonLeaf()
{
//if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount);
- if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount,"nextEnt={0} entCount={1}", nextEnt, entCount + " fp=" + fp);
+ if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt={0} entCount={1} fp={2}", nextEnt, entCount, fp);
nextEnt++;
int code = suffixesReader.ReadVInt32();
suffix = (int)((uint)code >> 1);
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
index a9da552..2909c27 100644
--- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
+++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
@@ -261,7 +261,7 @@ namespace Lucene.Net.Codecs
{
if (Debugging.AssertsEnabled) Debugging.Assert(numTerms > 0);
this.FieldInfo = fieldInfo;
- if (Debugging.AssertsEnabled) Debugging.Assert(rootCode != null,"field={0} numTerms={1}", fieldInfo.Name, numTerms);
+ if (Debugging.AssertsEnabled) Debugging.Assert(rootCode != null, "field={0} numTerms={1}", fieldInfo.Name, numTerms);
this.RootCode = rootCode;
this.IndexStartFP = indexStartFP;
this.NumTerms = numTerms;
@@ -490,11 +490,14 @@ namespace Lucene.Net.Codecs
public void CompileIndex(IList<PendingBlock> floorBlocks, RAMOutputStream scratchBytes)
{
- // LUCENENET specific - we use a custom wrapper function to display floorBlocks, since
- // it might contain garbage that cannot be converted into text.
- if (Debugging.AssertsEnabled) Debugging.Assert((IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), "isFloor={0} floorBlocks={1}", IsFloor , new PendingBlocksFormatter(floorBlocks));
+ if (Debugging.AssertsEnabled)
+ {
+ // LUCENENET specific - we use a custom wrapper struct to display floorBlocks, since
+ // it might contain garbage that cannot be converted into text.
+ Debugging.Assert((IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), "isFloor={0} floorBlocks={1}", IsFloor, new PendingBlocksFormatter(floorBlocks));
- if (Debugging.AssertsEnabled) Debugging.Assert(scratchBytes.GetFilePointer() == 0);
+ Debugging.Assert(scratchBytes.GetFilePointer() == 0);
+ }
// TODO: try writing the leading vLong in MSB order
// (opposite of what Lucene does today), for better
@@ -874,7 +877,7 @@ namespace Lucene.Net.Codecs
//System.out.println(" = " + pendingCount);
pendingCount = 0;
- if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.minItemsInBlock == 1 || subCount > 1,"minItemsInBlock={0} subCount={1}", outerInstance.minItemsInBlock, subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength);
+ if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.minItemsInBlock == 1 || subCount > 1, "minItemsInBlock={0} subCount={1} sub={2} of {3} subTermCount={4} subSubCount={5} depth={6}", outerInstance.minItemsInBlock, subCount, sub, numSubs, subTermCountSums[sub], subSubCounts[sub], prefixLength);
subCount = 0;
startLabel = subBytes[sub + 1];
@@ -946,7 +949,7 @@ namespace Lucene.Net.Codecs
int start = pending.Count - startBackwards;
- if (Debugging.AssertsEnabled) Debugging.Assert(start >= 0,"pending.Count={0} startBackwards={1}", pending.Count, startBackwards + " length=" + length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(start >= 0, "pending.Count={0} startBackwards={1} length={2}", pending.Count, startBackwards, length);
IList<PendingEntry> slice = pending.SubList(start, start + length);
@@ -1217,7 +1220,7 @@ namespace Lucene.Net.Codecs
blockBuilder.Finish();
// We better have one final "root" block:
- if (Debugging.AssertsEnabled) Debugging.Assert(pending.Count == 1 && !pending[0].IsTerm,"pending.size()={0} pending={1}", pending.Count, pending);
+ if (Debugging.AssertsEnabled) Debugging.Assert(pending.Count == 1 && !pending[0].IsTerm, "pending.Count={0} pending={1}", pending.Count, pending);
PendingBlock root = (PendingBlock)pending[0];
if (Debugging.AssertsEnabled)
{
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
index 44937dc..99e38a1 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@ -357,7 +357,7 @@ namespace Lucene.Net.Codecs.Compressing
FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK);
- if (Debugging.AssertsEnabled) Debugging.Assert(bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE,"bits={0}", bits.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE,"bits={0:x}", bits);
switch (visitor.NeedsField(fieldInfo))
{
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
index b43e5da..c30bc45 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
@@ -387,7 +387,7 @@ namespace Lucene.Net.Codecs.Compressing
private void Flush()
{
int chunkDocs = pendingDocs.Count;
- if (Debugging.AssertsEnabled) Debugging.Assert(chunkDocs > 0, chunkDocs.ToString());
+ if (Debugging.AssertsEnabled) Debugging.Assert(chunkDocs > 0, "{0}", chunkDocs);
// write the index file
indexWriter.WriteIndex(chunkDocs, vectorsStream.GetFilePointer());
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
index 751ec4e..5bed637 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
@@ -275,7 +275,7 @@ namespace Lucene.Net.Codecs.Compressing
if (resultArray.Length == 0)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(len == 0, len.ToString());
+ if (Debugging.AssertsEnabled) Debugging.Assert(len == 0, "{0}", len);
output.WriteVInt32(0);
return;
}
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs
index c1b3214..a65e195 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
using System.Diagnostics;
using JCG = J2N.Collections.Generic;
using Console = Lucene.Net.Util.SystemConsole;
+using Lucene.Net.Util;
namespace Lucene.Net.Codecs.Lucene3x
{
@@ -926,7 +927,9 @@ namespace Lucene.Net.Codecs.Lucene3x
else
{
current = t2.Bytes;
- if (Debugging.AssertsEnabled) Debugging.Assert(!unicodeSortOrder || term.CompareTo(current) < 0,"term={0} vs current={1}", UnicodeUtil.ToHexString(term.Utf8ToString()), UnicodeUtil.ToHexString(current.Utf8ToString()));
+ if (Debugging.AssertsEnabled) Debugging.Assert(!unicodeSortOrder || term.CompareTo(current) < 0,"term={0} vs current={1}",
+ // LUCENENET specific - use wrapper BytesRefFormatter struct to defer building the string unless string.Format() is called
+ new BytesRefFormatter(term, BytesRefFormat.UTF8AsHex), new BytesRefFormatter(current, BytesRefFormat.UTF8AsHex));
return SeekStatus.NOT_FOUND;
}
}
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs
index 8a09bbd..4243824 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs
@@ -193,7 +193,7 @@ namespace Lucene.Net.Codecs.Lucene3x
// Verify the file is long enough to hold all of our
// docs
- if (Debugging.AssertsEnabled) Debugging.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset,"indexSize={0} size={1}", indexSize, size + " docStoreOffset=" + docStoreOffset);
+ if (Debugging.AssertsEnabled) Debugging.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize={0} size={1} docStoreOffset={2}", indexSize, size, docStoreOffset);
}
else
{
@@ -273,7 +273,7 @@ namespace Lucene.Net.Codecs.Lucene3x
FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
int bits = fieldsStream.ReadByte() & 0xFF;
- if (Debugging.AssertsEnabled) Debugging.Assert(bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY),"bits={0}", bits.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY),"bits={0:x}", bits);
switch (visitor.NeedsField(fieldInfo))
{
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
index e210112..edb1314 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
@@ -160,7 +160,7 @@ namespace Lucene.Net.Codecs.Lucene3x
this.size = size;
// Verify the file is long enough to hold all of our
// docs
- if (Debugging.AssertsEnabled) Debugging.Assert(numTotalDocs >= size + docStoreOffset,"numTotalDocs={0} size={1}", numTotalDocs, size + " docStoreOffset=" + docStoreOffset);
+ if (Debugging.AssertsEnabled) Debugging.Assert(numTotalDocs >= size + docStoreOffset, "numTotalDocs={0} size={1} docStoreOffset={2}", numTotalDocs, size, docStoreOffset);
}
this.fieldInfos = fieldInfos;
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs
index 787dcfb..e954a65 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs
@@ -94,7 +94,7 @@ namespace Lucene.Net.Codecs.Lucene40
BitVector liveDocs = new BitVector(dir, filename, context);
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(liveDocs.Count() == info.Info.DocCount - info.DelCount,"liveDocs.count()={0} info.docCount={1}", liveDocs.Count(), info.Info.DocCount + " info.getDelCount()=" + info.DelCount);
+ Debugging.Assert(liveDocs.Count() == info.Info.DocCount - info.DelCount, "liveDocs.Count()={0} info.DocCount={1} info.DelCount={2}", liveDocs.Count(), info.Info.DocCount, info.DelCount);
Debugging.Assert(liveDocs.Length == info.Info.DocCount);
}
return liveDocs;
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs
index d161dc7..2f32c2f 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs
@@ -933,7 +933,7 @@ namespace Lucene.Net.Codecs.Lucene40
posPendingCount--;
- if (Debugging.AssertsEnabled) Debugging.Assert(posPendingCount >= 0,"nextPosition() was called too many times (more than freq() times) posPendingCount={0}", posPendingCount);
+ if (Debugging.AssertsEnabled) Debugging.Assert(posPendingCount >= 0,"NextPosition() was called too many times (more than Freq() times) posPendingCount={0}", posPendingCount);
return position;
}
@@ -1228,7 +1228,7 @@ namespace Lucene.Net.Codecs.Lucene40
posPendingCount--;
- if (Debugging.AssertsEnabled) Debugging.Assert(posPendingCount >= 0,"nextPosition() was called too many times (more than freq() times) posPendingCount={0}", posPendingCount);
+ if (Debugging.AssertsEnabled) Debugging.Assert(posPendingCount >= 0,"NextPosition() was called too many times (more than Freq() times) posPendingCount={0}", posPendingCount);
//System.out.println("StandardR.D&PE nextPos return pos=" + position);
return position;
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
index bb9a4e8..f66567f 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
@@ -177,7 +177,7 @@ namespace Lucene.Net.Codecs.Lucene40
FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
int bits = fieldsStream.ReadByte() & 0xFF;
- if (Debugging.AssertsEnabled) Debugging.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY),"bits={0}", bits.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY),"bits={0:x}", bits);
switch (visitor.NeedsField(fieldInfo))
{
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
index 1481590..4263f39 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
@@ -113,7 +113,7 @@ namespace Lucene.Net.Codecs.Lucene40
public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0,"fieldName={0} lastFieldName={1}", info.Name, lastFieldName);
+ if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, "fieldName={0} lastFieldName={1}", info.Name, lastFieldName);
lastFieldName = info.Name;
this.positions = positions;
this.offsets = offsets;
diff --git a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
index edc45dd..32e1fe4 100644
--- a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
+++ b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
@@ -106,7 +106,7 @@ namespace Lucene.Net.Codecs
current = subs[upto].DocsEnum;
currentBase = mergeState.DocBase[reader];
currentMap = mergeState.DocMaps[reader];
- if (Debugging.AssertsEnabled) Debugging.Assert(currentMap.MaxDoc == subs[upto].Slice.Length,"readerIndex={0} subs.len={1}", reader, subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(currentMap.MaxDoc == subs[upto].Slice.Length, "readerIndex={0} subs.len={1} len1={2} vs {3}", reader, subs.Length, currentMap.MaxDoc, subs[upto].Slice.Length);
}
}
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
index d01bce0..8d1eb9d 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
@@ -204,7 +204,7 @@ namespace Lucene.Net.Codecs.PerField
}
previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
- if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesGen != -1 || previousValue == null,"suffix={0} prevValue={1}", Convert.ToString(suffix, CultureInfo.InvariantCulture), previousValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesGen != -1 || previousValue == null,"suffix={0} prevValue={1}", suffix, previousValue);
// TODO: we should only provide the "slice" of FIS
// that this DVF actually sees ...
diff --git a/src/Lucene.Net/Codecs/TermVectorsWriter.cs b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
index 5431d80..a817481 100644
--- a/src/Lucene.Net/Codecs/TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
@@ -286,7 +286,7 @@ namespace Lucene.Net.Codecs
fieldCount++;
FieldInfo fieldInfo = mergeState.FieldInfos.FieldInfo(fieldName);
- if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0,"lastFieldName={0} fieldName={1}", lastFieldName, fieldName);
+ if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, "lastFieldName={0} fieldName={1}", lastFieldName, fieldName);
lastFieldName = fieldName;
Terms terms = vectors.GetTerms(fieldName);
diff --git a/src/Lucene.Net/Index/BufferedUpdatesStream.cs b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
index 08cd2f3..4aac6d0 100644
--- a/src/Lucene.Net/Index/BufferedUpdatesStream.cs
+++ b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
@@ -688,7 +688,7 @@ namespace Lucene.Net.Index
{
if (term != null)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0,"lastTerm={0} vs term={1}", lastDeleteTerm, term);
+ if (Debugging.AssertsEnabled) Debugging.Assert(lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, "lastTerm={0} vs term={1}", lastDeleteTerm, term);
}
// TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
lastDeleteTerm = term == null ? null : new Term(term.Field, BytesRef.DeepCopyOf(term.Bytes));
diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs
index 07ea3cd..587a461 100644
--- a/src/Lucene.Net/Index/DocumentsWriter.cs
+++ b/src/Lucene.Net/Index/DocumentsWriter.cs
@@ -588,7 +588,9 @@ namespace Lucene.Net.Index
SegmentFlushTicket ticket = null;
try
{
- if (Debugging.AssertsEnabled) Debugging.Assert(currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue,"expected: {0}but was: {1}", currentFullFlushDelQueue, flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush);
+ if (Debugging.AssertsEnabled) Debugging.Assert(currentFullFlushDelQueue == null
+ || flushingDWPT.deleteQueue == currentFullFlushDelQueue,
+ "expected: {0} but was: {1} {2}", currentFullFlushDelQueue, flushingDWPT.deleteQueue, flushControl.IsFullFlush);
/*
* Since with DWPT the flush process is concurrent and several DWPT
* could flush at the same time we must maintain the order of the
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
index 9310228..cdb4688 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
@@ -152,7 +152,11 @@ namespace Lucene.Net.Index
* fail. To prevent this we only assert if the the largest document seen
* is smaller than the 1/2 of the maxRamBufferMB
*/
- if (Debugging.AssertsEnabled) Debugging.Assert(ram <= expected, "actual mem: {0} byte, expected mem: {1} byte, flush mem: {2}, active mem: {3}, pending DWPT: {4}, flushing DWPT: {5}, blocked DWPT: {6}, peakDelta mem: {7} byte", ram, expected, flushBytes, activeBytes, numPending, NumFlushingDWPT, NumBlockedFlushes, peakDelta);
+ if (Debugging.AssertsEnabled) Debugging.Assert(ram <= expected,
+ "actual mem: {0} byte, expected mem: {1}"
+ + " byte, flush mem: {2}, active mem: {3}"
+ + ", pending DWPT: {4}, flushing DWPT: {5}"
+ + ", blocked DWPT: {6}, peakDelta mem: {7} byte", ram, expected, flushBytes, activeBytes, numPending, NumFlushingDWPT, NumBlockedFlushes, peakDelta);
}
}
return true;
@@ -609,8 +613,8 @@ namespace Lucene.Net.Index
{
if (!success) // make sure we unlock if this fails
{
- perThreadPool.Release(perThread);
- }
+ perThreadPool.Release(perThread);
+ }
}
}
@@ -646,7 +650,10 @@ namespace Lucene.Net.Index
}
continue;
}
- if (Debugging.AssertsEnabled) Debugging.Assert(next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue," flushingQueue: {0} currentqueue: {1}", flushingQueue, documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM);
+ if (Debugging.AssertsEnabled) Debugging.Assert(next.dwpt.deleteQueue == flushingQueue
+ || next.dwpt.deleteQueue == documentsWriter.deleteQueue,
+ " flushingQueue: {0} currentqueue: {1} perThread queue: {2} numDocsInRam: {3}",
+ flushingQueue, documentsWriter.deleteQueue, next.dwpt.deleteQueue, next.dwpt.NumDocsInRAM);
if (next.dwpt.deleteQueue != flushingQueue)
{
// this one is already a new DWPT
diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
index c93ecd1..5eaea1a 100644
--- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
+++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
@@ -248,7 +248,7 @@ namespace Lucene.Net.Index
}
else if (docState.docID != postings.lastDocIDs[termID])
{
- if (Debugging.AssertsEnabled) Debugging.Assert(docState.docID > postings.lastDocIDs[termID],"id: {0} postings ID: {1}", docState.docID, postings.lastDocIDs[termID] + " termID: " + termID);
+ if (Debugging.AssertsEnabled) Debugging.Assert(docState.docID > postings.lastDocIDs[termID], "id: {0} postings ID: {1} termID: {2}", docState.docID, postings.lastDocIDs[termID], termID);
// Term not yet seen in the current doc but previously
// seen in other doc(s) since the last flush
@@ -644,7 +644,7 @@ namespace Lucene.Net.Index
{
if (writeOffsets)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(startOffset >= 0 && endOffset >= startOffset,"startOffset={0},endOffset={1}", startOffset, endOffset + ",offset=" + offset);
+ if (Debugging.AssertsEnabled) Debugging.Assert(startOffset >= 0 && endOffset >= startOffset, "startOffset={0},endOffset={1},offset={2}", startOffset, endOffset, offset);
postingsConsumer.AddPosition(position, thisPayload, startOffset, endOffset);
}
else
diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs
index 14a83e8..d2a07c0 100644
--- a/src/Lucene.Net/Index/IndexFileDeleter.cs
+++ b/src/Lucene.Net/Index/IndexFileDeleter.cs
@@ -529,9 +529,12 @@ namespace Lucene.Net.Index
/// </summary>
public void Checkpoint(SegmentInfos segmentInfos, bool isCommit)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked);
+ if (Debugging.AssertsEnabled)
+ {
+ Debugging.Assert(IsLocked);
- if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer));
+ Debugging.Assert(Monitor.IsEntered(writer));
+ }
long t0 = 0;
if (infoStream.IsEnabled("IFD"))
{
@@ -723,8 +726,6 @@ namespace Lucene.Net.Index
// the file is open in another process, and queue
// the file for subsequent deletion.
- //if (Debugging.AssertsEnabled) Debugging.Assert(e.Message.Contains("cannot delete"));
-
if (infoStream.IsEnabled("IFD"))
{
infoStream.Message("IFD",
diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs
index 2b47980..0fee329 100644
--- a/src/Lucene.Net/Index/IndexWriter.cs
+++ b/src/Lucene.Net/Index/IndexWriter.cs
@@ -1,4 +1,5 @@
using J2N;
+using J2N.Text;
using J2N.Threading;
using J2N.Threading.Atomic;
using Lucene.Net.Diagnostics;
@@ -707,7 +708,8 @@ namespace Lucene.Net.Index
}
else
{
- if (Debugging.AssertsEnabled) Debugging.Assert(rld.Info == info,"rld.info={0} info={1}", rld.Info, info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info));
+ if (Debugging.AssertsEnabled && !(rld.Info == info))
+ throw new AssertionException(string.Format("rld.info={0} info={1} isLive?={2} vs {3}", rld.Info, info, InfoIsLive(rld.Info),InfoIsLive(info)));
}
if (create)
@@ -1109,7 +1111,7 @@ namespace Lucene.Net.Index
}
foreach (IEvent e in eventQueue)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(e is DocumentsWriter.MergePendingEvent, e.ToString());
+ if (Debugging.AssertsEnabled) Debugging.Assert(e is DocumentsWriter.MergePendingEvent, "{0}", e);
}
return true;
}
@@ -1276,7 +1278,12 @@ namespace Lucene.Net.Index
{
closed = true;
}
- if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates,"{0} {1}", docWriter.perThreadPool.NumDeactivatedThreadStates(), docWriter.perThreadPool.MaxThreadStates);
+ if (Debugging.AssertsEnabled)
+ {
+ // LUCENENET specific - store the number of states so we don't have to call this method twice
+ int numDeactivatedThreadStates = docWriter.perThreadPool.NumDeactivatedThreadStates();
+ Debugging.Assert(numDeactivatedThreadStates == docWriter.perThreadPool.MaxThreadStates, "{0} {1}", numDeactivatedThreadStates, docWriter.perThreadPool.MaxThreadStates);
+ }
}
catch (OutOfMemoryException oom)
{
@@ -2402,7 +2409,7 @@ namespace Lucene.Net.Index
MergePolicy.MergeSpecification spec;
if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED,"Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: {0}", trigger.ToString());
+ if (Debugging.AssertsEnabled) Debugging.Assert(trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED,"Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: {0}", trigger);
spec = mergePolicy.FindForcedMerges(segmentInfos, maxNumSegments, segmentsToMerge);
newMergesFound = spec != null;
if (newMergesFound)
@@ -2587,7 +2594,12 @@ namespace Lucene.Net.Index
IOUtils.Dispose(writeLock); // release write lock
writeLock = null;
- if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates,"{0} {1}", docWriter.perThreadPool.NumDeactivatedThreadStates(), docWriter.perThreadPool.MaxThreadStates);
+ if (Debugging.AssertsEnabled)
+ {
+ // LUCENENET specific - store the number of states so we don't have to call this method twice
+ int numDeactivatedThreadStates = docWriter.perThreadPool.NumDeactivatedThreadStates();
+ Debugging.Assert(numDeactivatedThreadStates == docWriter.perThreadPool.MaxThreadStates, "{0} {1}", numDeactivatedThreadStates, docWriter.perThreadPool.MaxThreadStates);
+ }
}
success = true;
@@ -3450,7 +3462,7 @@ namespace Lucene.Net.Index
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(!SlowFileExists(directory, newFileName), "file \"{0}\" already exists; siFiles={1}", newFileName, string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles));
+ Debugging.Assert(!SlowFileExists(directory, newFileName), "file \"{0}\" already exists; siFiles={1}", newFileName, siFiles);
Debugging.Assert(!copiedFiles.Contains(file), "file \"{0}\" is being copied more than once", file);
}
copiedFiles.Add(file);
@@ -4070,7 +4082,7 @@ namespace Lucene.Net.Index
}
else
{
- if (Debugging.AssertsEnabled) Debugging.Assert(updatesIter.Doc > curDoc,"field={0} updateDoc={1}", mergingFields[idx], updatesIter.Doc + " curDoc=" + curDoc);
+ if (Debugging.AssertsEnabled) Debugging.Assert(updatesIter.Doc > curDoc, "field={0} updateDoc={1} curDoc={2}", mergingFields[idx], updatesIter.Doc, curDoc);
}
}
}
@@ -5037,7 +5049,7 @@ namespace Lucene.Net.Index
}
merge.readers.Add(reader);
- if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= info.Info.DocCount,"delCount={0} info.docCount={1}", delCount, info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount);
+ if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= info.Info.DocCount, "delCount={0} info.DocCount={1} rld.PendingDeleteCount={2} info.DelCount={3}", delCount, info.Info.DocCount, rld.PendingDeleteCount, info.DelCount);
segUpto++;
}
@@ -5379,13 +5391,17 @@ namespace Lucene.Net.Index
ICollection<string> files = toSync.GetFiles(directory, false);
foreach (string fileName in files)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(SlowFileExists(directory, fileName), "file {0} does not exist; files={1}, ", fileName, Arrays.ToString(directory.ListAll()));
- // If this trips it means we are missing a call to
- // .checkpoint somewhere, because by the time we
- // are called, deleter should know about every
- // file referenced by the current head
- // segmentInfos:
- if (Debugging.AssertsEnabled) Debugging.Assert(deleter.Exists(fileName),"IndexFileDeleter doesn't know about file {0}", fileName);
+ if (Debugging.AssertsEnabled)
+ {
+ // LUCENENET specific - use Directory.ListAllFormatter to defer directory listing/string building until after the condition fails
+ Debugging.Assert(SlowFileExists(directory, fileName), "file {0} does not exist; files={1}", fileName, new Directory.ListAllFormatter(directory));
+ // If this trips it means we are missing a call to
+ // .checkpoint somewhere, because by the time we
+ // are called, deleter should know about every
+ // file referenced by the current head
+ // segmentInfos:
+ Debugging.Assert(deleter.Exists(fileName), "IndexFileDeleter doesn't know about file {0}", fileName);
+ }
}
return true;
}
diff --git a/src/Lucene.Net/Index/MultiBits.cs b/src/Lucene.Net/Index/MultiBits.cs
index b9cdb81..8b3a255 100644
--- a/src/Lucene.Net/Index/MultiBits.cs
+++ b/src/Lucene.Net/Index/MultiBits.cs
@@ -50,7 +50,7 @@ namespace Lucene.Net.Index
private bool CheckLength(int reader, int doc)
{
int length = starts[1 + reader] - starts[reader];
- if (Debugging.AssertsEnabled) Debugging.Assert(doc - starts[reader] < length,"doc={0} reader={1}", doc, reader + " starts[reader]=" + starts[reader] + " length=" + length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(doc - starts[reader] < length, "doc={0} reader={1} starts[reader]={2} length={3}", doc, reader, starts[reader], length);
return true;
}
diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs
index 9966682..eb3f836 100644
--- a/src/Lucene.Net/Index/ReadersAndUpdates.cs
+++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs
@@ -154,7 +154,7 @@ namespace Lucene.Net.Index
count = Info.Info.DocCount;
}
- if (Debugging.AssertsEnabled) Debugging.Assert(Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count,"info.docCount={0} info.DelCount={1}", Info.Info.DocCount, Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count);
+ if (Debugging.AssertsEnabled) Debugging.Assert(Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, "info.docCount={0} info.DelCount={1} pendingDeleteCount={2} count={3}", Info.Info.DocCount, Info.DelCount, pendingDeleteCount, count);
return true;
}
}
@@ -234,7 +234,7 @@ namespace Lucene.Net.Index
{
Debugging.Assert(liveDocs != null);
Debugging.Assert(Monitor.IsEntered(writer));
- Debugging.Assert(docID >= 0 && docID < liveDocs.Length,"out of bounds: docid={0} liveDocsLength={1}", docID, liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount);
+ Debugging.Assert(docID >= 0 && docID < liveDocs.Length, "out of bounds: docid={0} liveDocsLength={1} seg={2} docCount={3}", docID, liveDocs.Length, Info.Info.Name, Info.Info.DocCount);
Debugging.Assert(!liveDocsShared);
}
bool didDelete = liveDocs.Get(docID);
diff --git a/src/Lucene.Net/Search/IndexSearcher.cs b/src/Lucene.Net/Search/IndexSearcher.cs
index 6c54bb7..b9d5452 100644
--- a/src/Lucene.Net/Search/IndexSearcher.cs
+++ b/src/Lucene.Net/Search/IndexSearcher.cs
@@ -135,7 +135,7 @@ namespace Lucene.Net.Search
/// <seealso cref="IndexReader.Context"/>
public IndexSearcher(IndexReaderContext context, TaskScheduler executor)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(context.IsTopLevel,"IndexSearcher's ReaderContext must be topLevel for reader{0}", context.Reader);
+ if (Debugging.AssertsEnabled) Debugging.Assert(context.IsTopLevel,"IndexSearcher's ReaderContext must be topLevel for reader {0}", context.Reader);
reader = context.Reader;
this.executor = executor;
this.m_readerContext = context;
diff --git a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs
index d0bbc81..5a1b675 100644
--- a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs
+++ b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs
@@ -285,7 +285,7 @@ namespace Lucene.Net.Search.Spans
}
for (int i = 0; i < subSpansByDoc.Length; i++)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(subSpansByDoc[i].Doc == maxDoc," NearSpansOrdered.toSameDoc() spans {0}\n at doc {1}", subSpansByDoc[0], subSpansByDoc[i].Doc + ", but should be at " + maxDoc);
+ if (Debugging.AssertsEnabled) Debugging.Assert(subSpansByDoc[i].Doc == maxDoc, " NearSpansOrdered.ToSameDoc() spans {0}\n at doc {1}, but should be at {2}", subSpansByDoc[0], subSpansByDoc[i].Doc, maxDoc);
}
inSameDoc = true;
return true;
diff --git a/src/Lucene.Net/Search/TermQuery.cs b/src/Lucene.Net/Search/TermQuery.cs
index 9076cb8..ec08a20 100644
--- a/src/Lucene.Net/Search/TermQuery.cs
+++ b/src/Lucene.Net/Search/TermQuery.cs
@@ -85,7 +85,7 @@ namespace Lucene.Net.Search
public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context),"The top-reader used to create Weight ({0}) is not the same as the current reader's top-reader ({1}", termStates.TopReaderContext, ReaderUtil.GetTopLevelContext(context));
+ if (Debugging.AssertsEnabled) Debugging.Assert(termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context),"The top-reader used to create Weight ({0}) is not the same as the current reader's top-reader ({1})", termStates.TopReaderContext, ReaderUtil.GetTopLevelContext(context));
TermsEnum termsEnum = GetTermsEnum(context);
if (termsEnum == null)
{
diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs
index dd528d9..acdfb51 100644
--- a/src/Lucene.Net/Search/TopTermsRewrite.cs
+++ b/src/Lucene.Net/Search/TopTermsRewrite.cs
@@ -84,7 +84,7 @@ namespace Lucene.Net.Search
foreach (ScoreTerm st in scoreTerms)
{
Term term = new Term(query.m_field, st.Bytes);
- if (Debugging.AssertsEnabled) Debugging.Assert(reader.DocFreq(term) == st.TermState.DocFreq,"reader DF is {0} vs {1}", reader.DocFreq(term), st.TermState.DocFreq + " term=" + term);
+ if (Debugging.AssertsEnabled) Debugging.Assert(reader.DocFreq(term) == st.TermState.DocFreq, "reader DF is {0} vs {1} term={2}", reader.DocFreq(term), st.TermState.DocFreq, term);
AddClause(q, term, st.TermState.DocFreq, query.Boost * st.Boost, st.TermState); // add to query
}
return q;
diff --git a/src/Lucene.Net/Store/BufferedIndexInput.cs b/src/Lucene.Net/Store/BufferedIndexInput.cs
index 397f887..cfcd26a 100644
--- a/src/Lucene.Net/Store/BufferedIndexInput.cs
+++ b/src/Lucene.Net/Store/BufferedIndexInput.cs
@@ -80,7 +80,7 @@ namespace Lucene.Net.Store
/// Change the buffer size used by this <see cref="IndexInput"/> </summary>
public void SetBufferSize(int newSize)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer == null || bufferSize == m_buffer.Length,"buffer={0} bufferSize={1}", m_buffer, bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0));
+ if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer == null || bufferSize == m_buffer.Length, "buffer={0} bufferSize={1} buffer.length={2}", m_buffer, bufferSize, (m_buffer != null ? m_buffer.Length : 0));
if (newSize != bufferSize)
{
CheckBufferSize(newSize);
diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs b/src/Lucene.Net/Util/Automaton/BasicOperations.cs
index 5c6cd4a..ddbc4e9 100644
--- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs
+++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs
@@ -868,7 +868,7 @@ namespace Lucene.Net.Util.Automaton
}
else
{
- if (Debugging.AssertsEnabled) Debugging.Assert((accCount > 0) == q.accept,"accCount={0} vs existing accept={1}", accCount, q.accept + " states=" + statesSet);
+ if (Debugging.AssertsEnabled) Debugging.Assert((accCount > 0) == q.accept, "accCount={0} vs existing accept={1} states={2}", accCount, q.accept, statesSet);
}
r.AddTransition(new Transition(lastPoint, point - 1, q));
diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs
index a60308e..430c02e 100644
--- a/src/Lucene.Net/Util/BroadWord.cs
+++ b/src/Lucene.Net/Util/BroadWord.cs
@@ -71,7 +71,7 @@ namespace Lucene.Net.Util
long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8
long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0;
- if (Debugging.AssertsEnabled) Debugging.Assert(0L <= 1, l.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(0L <= l, "{0}", l); // LUCENENET: fixed tautological "0L <= 1" (digit one); upstream Java is "assert 0L <= l : l;"
//assert l < 8 : l; //fails when bit r is not available.
// Select bit l from byte (x >>> b):
diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs
index 184c763..d0c6c24 100644
--- a/src/Lucene.Net/Util/BytesRefHash.cs
+++ b/src/Lucene.Net/Util/BytesRefHash.cs
@@ -552,7 +552,7 @@ namespace Lucene.Net.Util
if (Debugging.AssertsEnabled)
{
Debugging.Assert(bytesStart != null, "bytesStart is null - not initialized");
- Debugging.Assert(bytesID >= 0 && bytesID < count, bytesID.ToString());
+ Debugging.Assert(bytesID >= 0 && bytesID < count, "{0}", bytesID);
}
return bytesStart[bytesID];
}
diff --git a/src/Lucene.Net/Util/FixedBitSet.cs b/src/Lucene.Net/Util/FixedBitSet.cs
index f16d48d..002e9ca 100644
--- a/src/Lucene.Net/Util/FixedBitSet.cs
+++ b/src/Lucene.Net/Util/FixedBitSet.cs
@@ -259,7 +259,7 @@ namespace Lucene.Net.Util
public bool Get(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index}, numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int i = index >> 6; // div 64
// signed shift will keep a negative index and force an
// array-index-out-of-bounds-exception, removing the need for an explicit check.
@@ -270,7 +270,7 @@ namespace Lucene.Net.Util
public void Set(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index}, numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int wordNum = index >> 6; // div 64
int bit = index & 0x3f; // mod 64
long bitmask = 1L << bit;
@@ -279,7 +279,7 @@ namespace Lucene.Net.Util
public bool GetAndSet(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index}, numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int wordNum = index >> 6; // div 64
int bit = index & 0x3f; // mod 64
long bitmask = 1L << bit;
@@ -290,7 +290,7 @@ namespace Lucene.Net.Util
public void Clear(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index}, numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int wordNum = index >> 6;
int bit = index & 0x03f;
long bitmask = 1L << bit;
@@ -299,7 +299,7 @@ namespace Lucene.Net.Util
public bool GetAndClear(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index}, numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int wordNum = index >> 6; // div 64
int bit = index & 0x3f; // mod 64
long bitmask = 1L << bit;
@@ -314,7 +314,7 @@ namespace Lucene.Net.Util
/// </summary>
public int NextSetBit(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index}, numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int i = index >> 6;
int subIndex = index & 0x3f; // index within the word
long word = bits[i] >> subIndex; // skip all the bits to the right of index
@@ -342,7 +342,7 @@ namespace Lucene.Net.Util
/// </summary>
public int PrevSetBit(int index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, $"index={index} numBits={numBits}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}, numBits={1}", index, numBits);
int i = index >> 6;
int subIndex = index & 0x3f; // index within the word
long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index
@@ -405,7 +405,7 @@ namespace Lucene.Net.Util
private void Or(long[] otherArr, int otherNumWords)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(otherNumWords <= numWords, $"numWords={numWords}, otherNumWords={otherNumWords}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(otherNumWords <= numWords, "numWords={0}, otherNumWords={1}", numWords, otherNumWords);
long[] thisArr = this.bits;
int pos = Math.Min(numWords, otherNumWords);
while (--pos >= 0)
@@ -418,7 +418,7 @@ namespace Lucene.Net.Util
/// this = this XOR other </summary>
public void Xor(FixedBitSet other)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, $"numWords={numWords}, other.numWords={other.numWords}");
+ if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, "numWords={0}, other.numWords={1}", numWords, other.numWords);
long[] thisBits = this.bits;
long[] otherBits = other.bits;
int pos = Math.Min(numWords, other.numWords);
@@ -663,8 +663,8 @@ namespace Lucene.Net.Util
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(startIndex >= 0 && startIndex < numBits, $"startIndex={startIndex}, numBits={numBits}");
- Debugging.Assert(endIndex >= 0 && endIndex <= numBits, $"endIndex={endIndex}, numBits={numBits}");
+ Debugging.Assert(startIndex >= 0 && startIndex < numBits, "startIndex={0}, numBits={1}", startIndex, numBits);
+ Debugging.Assert(endIndex >= 0 && endIndex <= numBits, "endIndex={0}, numBits={1}", endIndex, numBits);
}
if (endIndex <= startIndex)
{
diff --git a/src/Lucene.Net/Util/Fst/Builder.cs b/src/Lucene.Net/Util/Fst/Builder.cs
index d1705ec..3662c35 100644
--- a/src/Lucene.Net/Util/Fst/Builder.cs
+++ b/src/Lucene.Net/Util/Fst/Builder.cs
@@ -372,7 +372,7 @@ namespace Lucene.Net.Util.Fst
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(lastInput.Length == 0 || input.CompareTo(lastInput) >= 0,"inputs are added out of order lastInput={0} vs input={1}", lastInput, input);
+ Debugging.Assert(lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, "inputs are added out of order lastInput={0} vs input={1}", lastInput, input);
Debugging.Assert(ValidOutput(output));
}
@@ -673,7 +673,7 @@ namespace Lucene.Net.Util.Fst
if (Debugging.AssertsEnabled) Debugging.Assert(label >= 0);
if (NumArcs != 0)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(label > Arcs[NumArcs - 1].Label,"arc[-1].Label={0} new label={1}", Arcs[NumArcs - 1].Label, label + " numArcs=" + NumArcs);
+ if (Debugging.AssertsEnabled) Debugging.Assert(label > Arcs[NumArcs - 1].Label, "arc[-1].Label={0} new label={1} numArcs={2}", Arcs[NumArcs - 1].Label, label, NumArcs);
}
if (NumArcs == Arcs.Length)
{
diff --git a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs
index f4935ce..48e7f5a 100644
--- a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs
+++ b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs
@@ -103,7 +103,7 @@ namespace Lucene.Net.Util.Fst
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(inc.Length < output.Length,"inc.length={0} vs output.length={1}", inc.Length, output.Length);
+ Debugging.Assert(inc.Length < output.Length, "inc.length={0} vs output.length={1}", inc.Length, output.Length);
Debugging.Assert(inc.Length > 0);
}
return new BytesRef(output.Bytes, output.Offset + inc.Length, output.Length - inc.Length);
diff --git a/src/Lucene.Net/Util/Fst/BytesStore.cs b/src/Lucene.Net/Util/Fst/BytesStore.cs
index ed1a8a1..6d58829 100644
--- a/src/Lucene.Net/Util/Fst/BytesStore.cs
+++ b/src/Lucene.Net/Util/Fst/BytesStore.cs
@@ -131,7 +131,7 @@ namespace Lucene.Net.Util.Fst
internal virtual void WriteBytes(long dest, byte[] b, int offset, int len)
{
//System.out.println(" BS.writeBytes dest=" + dest + " offset=" + offset + " len=" + len);
- if (Debugging.AssertsEnabled) Debugging.Assert(dest + len <= Position,"dest={0} pos={1}", dest, Position + " len=" + len);
+ if (Debugging.AssertsEnabled) Debugging.Assert(dest + len <= Position, "dest={0} pos={1} len={2}", dest, Position, len);
// Note: weird: must go "backwards" because copyBytes
// calls us with overlapping src/dest. If we
@@ -475,7 +475,7 @@ namespace Lucene.Net.Util.Fst
nextBuffer = bufferIndex + 1;
current = outerInstance.blocks[bufferIndex];
nextRead = (int)(value & outerInstance.blockMask);
- if (Debugging.AssertsEnabled) Debugging.Assert(this.Position == value,"pos={0} Position={1}", value, this.Position);
+ if (Debugging.AssertsEnabled) Debugging.Assert(this.Position == value,"value={0} Position={1}", value, this.Position);
}
}
diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs
index c1dd8b0..380a425 100644
--- a/src/Lucene.Net/Util/Fst/FST.cs
+++ b/src/Lucene.Net/Util/Fst/FST.cs
@@ -807,7 +807,7 @@ namespace Lucene.Net.Util.Fst
if (srcPos != destPos)
{
//System.out.println(" copy len=" + bytesPerArc[arcIdx]);
- if (Debugging.AssertsEnabled) Debugging.Assert(destPos > srcPos,"destPos={0} srcPos={1}", destPos, srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs);
+ if (Debugging.AssertsEnabled) Debugging.Assert(destPos > srcPos, "destPos={0} srcPos={1} arcIdx={2} maxBytesPerArc={3} bytesPerArc[arcIdx]={4} nodeIn.numArcs={5}", destPos, srcPos, arcIdx, maxBytesPerArc, bytesPerArc[arcIdx], nodeIn.NumArcs);
bytes.CopyBytes(srcPos, destPos, bytesPerArc[arcIdx]);
}
}
diff --git a/src/Lucene.Net/Util/Fst/FSTEnum.cs b/src/Lucene.Net/Util/Fst/FSTEnum.cs
index 7449a89..dd5e9f7 100644
--- a/src/Lucene.Net/Util/Fst/FSTEnum.cs
+++ b/src/Lucene.Net/Util/Fst/FSTEnum.cs
@@ -202,7 +202,7 @@ namespace Lucene.Net.Util.Fst
if (Debugging.AssertsEnabled)
{
Debugging.Assert(arc.ArcIdx == mid);
- Debugging.Assert(arc.Label == targetLabel,"arc.label={0} vs targetLabel={1}", arc.Label, targetLabel + " mid=" + mid);
+ Debugging.Assert(arc.Label == targetLabel, "arc.label={0} vs targetLabel={1} mid={2}", arc.Label, targetLabel, mid);
}
m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output);
if (targetLabel == FST.END_LABEL)
@@ -376,7 +376,7 @@ namespace Lucene.Net.Util.Fst
if (Debugging.AssertsEnabled)
{
Debugging.Assert(arc.ArcIdx == mid);
- Debugging.Assert(arc.Label == targetLabel,"arc.label={0} vs targetLabel={1}", arc.Label, targetLabel + " mid=" + mid);
+ Debugging.Assert(arc.Label == targetLabel, "arc.label={0} vs targetLabel={1} mid={2}", arc.Label, targetLabel, mid);
}
m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output);
if (targetLabel == FST.END_LABEL)
diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs
index 28c0ebe..c74d426 100644
--- a/src/Lucene.Net/Util/Fst/NoOutputs.cs
+++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Util.Fst
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(prefix == NO_OUTPUT,"got {0}", prefix);
+ Debugging.Assert(prefix == NO_OUTPUT, "got {0}", prefix);
Debugging.Assert(output == NO_OUTPUT);
}
return NO_OUTPUT;
diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs
index cbff33e..caa7937 100644
--- a/src/Lucene.Net/Util/Fst/NodeHash.cs
+++ b/src/Lucene.Net/Util/Fst/NodeHash.cs
@@ -162,7 +162,12 @@ namespace Lucene.Net.Util.Fst
// freeze & add
long node = fst.AddNode(nodeIn);
//System.out.println(" now freeze node=" + node);
- if (Debugging.AssertsEnabled) Debugging.Assert(Hash(node) == h,"frozenHash={0} vs h={1}", Hash(node), h);
+ if (Debugging.AssertsEnabled)
+ {
+ // LUCENENET specific - store hash value and reuse it, since it might be expensive to create
+ long hash = Hash(node);
+ Debugging.Assert(hash == h, "frozenHash={0} vs h={1}", hash, h);
+ }
count++;
table.Set(pos, node);
// Rehash at 2/3 occupancy:
diff --git a/src/Lucene.Net/Util/LongBitSet.cs b/src/Lucene.Net/Util/LongBitSet.cs
index 34539f3..ae4f116 100644
--- a/src/Lucene.Net/Util/LongBitSet.cs
+++ b/src/Lucene.Net/Util/LongBitSet.cs
@@ -122,7 +122,7 @@ namespace Lucene.Net.Util
public bool Get(long index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits,"index={0}", index);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0}", index);
int i = (int)(index >> 6); // div 64
// signed shift will keep a negative index and force an
// array-index-out-of-bounds-exception, removing the need for an explicit check.
@@ -133,7 +133,7 @@ namespace Lucene.Net.Util
public void Set(long index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits,"index={0} numBits={1}", index, numBits);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0} numBits={1}", index, numBits);
int wordNum = (int)(index >> 6); // div 64
int bit = (int)(index & 0x3f); // mod 64
long bitmask = 1L << bit;
@@ -205,7 +205,7 @@ namespace Lucene.Net.Util
/// </summary>
public long PrevSetBit(long index)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits,"index={0} numBits={1}", index, numBits);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, "index={0} numBits={1}", index, numBits);
int i = (int)(index >> 6);
int subIndex = (int)(index & 0x3f); // index within the word
long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index
@@ -231,7 +231,7 @@ namespace Lucene.Net.Util
/// this = this OR other </summary>
public void Or(Int64BitSet other)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords,"numWords={0}, other.numWords={1}", numWords, other.numWords);
+ if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, "numWords={0}, other.numWords={1}", numWords, other.numWords);
int pos = Math.Min(numWords, other.numWords);
while (--pos >= 0)
{
@@ -243,7 +243,7 @@ namespace Lucene.Net.Util
/// this = this XOR other </summary>
public void Xor(Int64BitSet other)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords,"numWords={0}, other.numWords={1}", numWords, other.numWords);
+ if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, "numWords={0}, other.numWords={1}", numWords, other.numWords);
int pos = Math.Min(numWords, other.numWords);
while (--pos >= 0)
{
diff --git a/src/Lucene.Net/Util/OfflineSorter.cs b/src/Lucene.Net/Util/OfflineSorter.cs
index b5ee554..459f12a 100644
--- a/src/Lucene.Net/Util/OfflineSorter.cs
+++ b/src/Lucene.Net/Util/OfflineSorter.cs
@@ -652,7 +652,7 @@ namespace Lucene.Net.Util
}
#pragma warning restore CA1031 // Do not catch general exception types
- if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0,"Sanity: sequence length < 0: {0}", length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0, "Sanity: sequence length < 0: {0}", length);
byte[] result = new byte[length];
inputStream.ReadBytes(result, 0, length);
return result;
diff --git a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs
index 93f58cf..f59debe 100644
--- a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs
+++ b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs
@@ -316,7 +316,7 @@ namespace Lucene.Net.Util
++numBlocks;
- if (Debugging.AssertsEnabled) Debugging.Assert(data.Length - originalLength == blockSize, (data.Length - originalLength) + " <> " + blockSize);
+ if (Debugging.AssertsEnabled) Debugging.Assert(data.Length - originalLength == blockSize, "{0} <> {1}", (data.Length - originalLength), blockSize);
}
/// <summary>
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs
index c883728..5092132 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs
@@ -126,7 +126,7 @@ namespace Lucene.Net.Util.Packed
/// <returns> The low value for the current decoding index. </returns>
private long CurrentLowValue()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(((efIndex >= 0) && (efIndex < numEncoded)), "efIndex {0}", efIndex.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(((efIndex >= 0) && (efIndex < numEncoded)), "efIndex {0}", efIndex);
return UnPackValue(efEncoder.lowerLongs, efEncoder.numLowBits, efIndex, efEncoder.lowerBitsMask);
}
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
index d64bcce..3cb2699 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
@@ -220,7 +220,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words()
{
- if (Debugging.AssertsEnabled) Debugging.Assert(numBits >= 0, numBits.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(numBits >= 0, "{0}", numBits);
return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE);
}
diff --git a/src/Lucene.Net/Util/Packed/Packed64.cs b/src/Lucene.Net/Util/Packed/Packed64.cs
index 194dbff..3d62878 100644
--- a/src/Lucene.Net/Util/Packed/Packed64.cs
+++ b/src/Lucene.Net/Util/Packed/Packed64.cs
@@ -74,17 +74,7 @@ namespace Lucene.Net.Util.Packed
PackedInt32s.Format format = PackedInt32s.Format.PACKED;
int longCount = format.Int64Count(PackedInt32s.VERSION_CURRENT, valueCount, bitsPerValue);
this.blocks = new long[longCount];
- // MaskRight = ~0L << (int)((uint)(BLOCK_SIZE - bitsPerValue) >> (BLOCK_SIZE - bitsPerValue)); //original
- // MaskRight = (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue); //mod
-
- /*var a = ~0L << (int)((uint)(BLOCK_SIZE - bitsPerValue) >> (BLOCK_SIZE - bitsPerValue)); //original
- var b = (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue); //mod
- if (Debugging.AssertsEnabled) Debugging.Assert(a == b, "a: " + a, ", b: " + b);*/
-
- maskRight = (long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); //mod
-
- //if (Debugging.AssertsEnabled) Debugging.Assert((long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)) == (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue));
-
+ maskRight = (long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue));
bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE;
}
@@ -130,7 +120,6 @@ namespace Lucene.Net.Util.Packed
long majorBitPos = (long)index * m_bitsPerValue;
// The index in the backing long-array
int elementPos = (int)(((ulong)majorBitPos) >> BLOCK_BITS);
- //int elementPos = (int)((long)((ulong)majorBitPos >> BLOCK_BITS));
// The number of value-bits in the second long
long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize;
@@ -142,36 +131,6 @@ namespace Lucene.Net.Util.Packed
return ((blocks[elementPos] << (int)endBits) | ((long)((ulong)blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & maskRight;
}
- /*/// <param name="index"> the position of the value. </param>
- /// <returns> the value at the given index. </returns>
- public override long Get(int index)
- {
- // The abstract index in a bit stream
- long majorBitPos = (long)index * bitsPerValue;
- // The index in the backing long-array
- int elementPos = (int)((long)((ulong)majorBitPos >> BLOCK_BITS));
- // The number of value-bits in the second long
- long endBits = (majorBitPos & MOD_MASK) + BpvMinusBlockSize;
-
- if (endBits <= 0) // Single block
- {
- var mod = (long) ((ulong) (Blocks[elementPos]) >> (int) (-endBits)) & MaskRight;
- var og = ((long) ((ulong) Blocks[elementPos] >> (int) -endBits)) & MaskRight;
- if (Debugging.AssertsEnabled) Debugging.Assert(mod == og);
-
- //return (long)((ulong)(Blocks[elementPos]) >> (int)(-endBits)) & MaskRight;
- return ((long)((ulong)Blocks[elementPos] >> (int)-endBits)) & MaskRight;
- }
- // Two blocks
- var a = (((Blocks[elementPos] << (int)endBits) | (long)(((ulong)(Blocks[elementPos + 1])) >> (int)(BLOCK_SIZE - endBits))) & MaskRight);
- var b = ((Blocks[elementPos] << (int)endBits) | ((long)((ulong)Blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & MaskRight;
-
- if (Debugging.AssertsEnabled) Debugging.Assert(a == b);
-
- //return (((Blocks[elementPos] << (int)endBits) | (long)(((ulong)(Blocks[elementPos + 1])) >> (int)(BLOCK_SIZE - endBits))) & MaskRight);
- return ((Blocks[elementPos] << (int)endBits) | ((long)((ulong)Blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & MaskRight;
- }*/
-
public override int Get(int index, long[] arr, int off, int len)
{
if (Debugging.AssertsEnabled) Debugging.Assert(len > 0, "len must be > 0 (got {0})", len);
diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
index a335a6d..005fe25 100644
--- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs
+++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
@@ -53,7 +53,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public long ReadInt64(int bitsPerValue)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "{0}", bitsPerValue);
long r = 0;
while (bitsPerValue > 0)
{
diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs
index d65b9da..7f4dd71 100644
--- a/src/Lucene.Net/Util/Packed/PackedInts.cs
+++ b/src/Lucene.Net/Util/Packed/PackedInts.cs
@@ -205,7 +205,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPerValue)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, "{0}", bitsPerValue);
// assume long-aligned
return 8L * Int64Count(packedIntsVersion, valueCount, bitsPerValue);
}
@@ -218,7 +218,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public virtual int Int64Count(int packedIntsVersion, int valueCount, int bitsPerValue)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, "{0}", bitsPerValue);
long byteCount = ByteCount(packedIntsVersion, valueCount, bitsPerValue);
if (Debugging.AssertsEnabled) Debugging.Assert(byteCount < 8L * int.MaxValue);
if ((byteCount % 8) == 0)
@@ -746,7 +746,7 @@ namespace Lucene.Net.Util.Packed
protected ReaderImpl(int valueCount, int bitsPerValue)
{
this.m_bitsPerValue = bitsPerValue;
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64,"bitsPerValue={0}", bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue={0}", bitsPerValue);
this.m_valueCount = valueCount;
}
@@ -765,7 +765,7 @@ namespace Lucene.Net.Util.Packed
protected MutableImpl(int valueCount, int bitsPerValue)
{
this.m_valueCount = valueCount;
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64,"bitsPerValue={0}", bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue={0}", bitsPerValue);
this.m_bitsPerValue = bitsPerValue;
}
@@ -986,7 +986,7 @@ namespace Lucene.Net.Util.Packed
{
int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64,"bitsPerValue={0}", bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue={0}", bitsPerValue);
int valueCount = @in.ReadVInt32();
Format format = Format.ById(@in.ReadVInt32());
@@ -1028,7 +1028,7 @@ namespace Lucene.Net.Util.Packed
{
int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64,"bitsPerValue={0}", bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue={0}", bitsPerValue);
int valueCount = @in.ReadVInt32();
Format format = Format.ById(@in.ReadVInt32());
return GetReaderIteratorNoHeader(@in, format, version, valueCount, bitsPerValue, mem);
@@ -1151,7 +1151,7 @@ namespace Lucene.Net.Util.Packed
{
int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64,"bitsPerValue={0}", bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue={0}", bitsPerValue);
int valueCount = @in.ReadVInt32();
Format format = Format.ById(@in.ReadVInt32());
return GetDirectReaderNoHeader(@in, format, version, valueCount, bitsPerValue);
@@ -1428,7 +1428,7 @@ namespace Lucene.Net.Util.Packed
{
int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64,"bitsPerValue={0}", bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue={0}", bitsPerValue);
int valueCount = @in.ReadVInt32();
Format format = Format.ById(@in.ReadVInt32());
return new Header(format, valueCount, bitsPerValue, version);
diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs
index c568133..ee71f34 100644
--- a/src/Lucene.Net/Util/Packed/PackedWriter.cs
+++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs
@@ -58,7 +58,7 @@ namespace Lucene.Net.Util.Packed
{
if (Debugging.AssertsEnabled)
{
- Debugging.Assert(m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), m_bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ Debugging.Assert(m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), "{0}", m_bitsPerValue);
Debugging.Assert(!finished);
}
if (m_valueCount != -1 && written >= m_valueCount)
diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs
index 178ca24..ba267c6 100644
--- a/src/Lucene.Net/Util/PagedBytes.cs
+++ b/src/Lucene.Net/Util/PagedBytes.cs
@@ -166,7 +166,7 @@ namespace Lucene.Net.Util
/// </summary>
public PagedBytes(int blockBits)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(blockBits > 0 && blockBits <= 31, blockBits.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(blockBits > 0 && blockBits <= 31, "{0}", blockBits);
this.blockSize = 1 << blockBits;
this.blockBits = blockBits;
blockMask = blockSize - 1;
diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs
index f40b3da..4928e99 100644
--- a/src/Lucene.Net/Util/RamUsageEstimator.cs
+++ b/src/Lucene.Net/Util/RamUsageEstimator.cs
@@ -909,7 +909,7 @@ namespace Lucene.Net.Util
if (Debugging.AssertsEnabled)
{
Debugging.Assert(current > 0 && ((current & (current - 1)) == 0), "Capacity must be a power of two.");
- Debugging.Assert((current << 1) > 0,"Maximum capacity exceeded ({0}", ((int)((uint)0x80000000 >> 1)) + ").");
+ Debugging.Assert((current << 1) > 0, "Maximum capacity exceeded ({0}).", ((int)((uint)0x80000000 >> 1)));
}
if (current < MIN_CAPACITY / 2)
diff --git a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs
index 1b25bd1..21b8554 100644
--- a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs
+++ b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs
@@ -129,7 +129,7 @@ namespace Lucene.Net.Util
/// <returns> The number of actually removed buffers. </returns>
public int FreeBlocks(int num)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(num >= 0,"free blocks must be >= 0 but was: {0}", num);
+ if (Debugging.AssertsEnabled) Debugging.Assert(num >= 0, "free blocks must be >= 0 but was: {0}", num);
int stop;
int count;
if (num > freeBlocks)
diff --git a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs
index aa4a1ff..6487d5e 100644
--- a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs
+++ b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs
@@ -140,7 +140,7 @@ namespace Lucene.Net.Util
/// <returns> The number of actually removed buffers. </returns>
public int FreeBlocks(int num)
{
- if (Debugging.AssertsEnabled) Debugging.Assert(num >= 0,"free blocks must be >= 0 but was: {0}", num);
+ if (Debugging.AssertsEnabled) Debugging.Assert(num >= 0, "free blocks must be >= 0 but was: {0}", num);
int stop;
int count;
if (num > freeBlocks)
diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs
index bd3ac0e..8ee93d1 100644
--- a/src/Lucene.Net/Util/RollingBuffer.cs
+++ b/src/Lucene.Net/Util/RollingBuffer.cs
@@ -157,7 +157,7 @@ namespace Lucene.Net.Util
if (Debugging.AssertsEnabled)
{
Debugging.Assert(toFree >= 0);
- Debugging.Assert(toFree <= count,"toFree={0} count={1}", toFree, count);
+ Debugging.Assert(toFree <= count, "toFree={0} count={1}", toFree, count);
}
int index = nextWrite - count;
if (index < 0)
diff --git a/src/Lucene.Net/Util/UnicodeUtil.cs b/src/Lucene.Net/Util/UnicodeUtil.cs
index b2dfcbd..663fcc0 100644
--- a/src/Lucene.Net/Util/UnicodeUtil.cs
+++ b/src/Lucene.Net/Util/UnicodeUtil.cs
@@ -844,7 +844,7 @@ namespace Lucene.Net.Util
}
else
{
- if (Debugging.AssertsEnabled) Debugging.Assert(b < 0xf8,"b = 0x{0}", b.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(b < 0xf8, "b = 0x{0:x}", b);
int ch = ((b & 0x7) << 18) + ((utf8[offset] & 0x3f) << 12) + ((utf8[offset + 1] & 0x3f) << 6) + (utf8[offset + 2] & 0x3f);
offset += 3;
if (ch < UNI_MAX_BMP)