You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2020/08/24 18:29:53 UTC
[lucenenet] 06/13: SWEEP: Reviewed and added missing asserts and
moved some assert conditions to be not run when asserts are disabled
This is an automated email from the ASF dual-hosted git repository.
nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
commit 447bc866fe030951e090d648d475333d2b7226a6
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Fri Aug 14 10:01:41 2020 +0700
SWEEP: Reviewed and added missing asserts and moved some assert conditions to be not run when asserts are disabled
---
.../Analysis/CharFilter/BaseCharFilter.cs | 3 +-
.../Analysis/Pt/RSLPStemmerBase.cs | 3 +-
.../SimpleText/SimpleTextDocValuesReader.cs | 6 +--
.../SimpleText/SimpleTextFieldsReader.cs | 3 +-
.../SimpleText/SimpleTextTermVectorsReader.cs | 14 +-----
.../Index/TestFlushByRamOrCountsPolicy.cs | 4 +-
src/Lucene.Net/Analysis/Tokenizer.cs | 4 +-
src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 7 ++-
src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs | 3 +-
.../Codecs/Compressing/CompressionMode.cs | 2 +
.../Codecs/Lucene3x/Lucene3xTermVectorsReader.cs | 11 +----
src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs | 2 +-
src/Lucene.Net/Codecs/Lucene40/BitVector.cs | 6 +--
.../Codecs/Lucene40/Lucene40TermVectorsReader.cs | 11 +----
src/Lucene.Net/Index/CheckIndex.cs | 38 ++++++++-------
src/Lucene.Net/Index/ConcurrentMergeScheduler.cs | 3 +-
src/Lucene.Net/Index/DirectoryReader.cs | 1 -
src/Lucene.Net/Index/DocFieldProcessor.cs | 1 -
src/Lucene.Net/Index/DocTermOrds.cs | 3 +-
src/Lucene.Net/Index/DocValuesFieldUpdates.cs | 4 +-
src/Lucene.Net/Index/DocumentsWriter.cs | 22 ++++-----
src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs | 1 -
.../Index/DocumentsWriterFlushControl.cs | 9 ++--
src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs | 12 ++---
.../Index/DocumentsWriterPerThreadPool.cs | 56 +++++++++++++++-------
.../Index/DocumentsWriterStallControl.cs | 15 +++---
.../Index/FreqProxTermsWriterPerField.cs | 3 +-
src/Lucene.Net/Index/IndexFileDeleter.cs | 6 +--
src/Lucene.Net/Index/IndexWriter.cs | 37 ++++----------
src/Lucene.Net/Index/PrefixCodedTerms.cs | 3 ++
src/Lucene.Net/Index/ReadersAndUpdates.cs | 17 ++++---
src/Lucene.Net/Index/SortedDocValuesWriter.cs | 1 +
src/Lucene.Net/Index/SortedSetDocValuesWriter.cs | 2 +-
src/Lucene.Net/Index/StoredFieldsProcessor.cs | 11 ++---
src/Lucene.Net/Index/TermVectorsConsumer.cs | 3 +-
.../Index/TermVectorsConsumerPerField.cs | 6 +--
.../ThreadAffinityDocumentsWriterThreadPool.cs | 6 +--
src/Lucene.Net/Search/FieldCacheRangeFilter.cs | 3 +-
src/Lucene.Net/Search/FieldComparator.cs | 4 +-
src/Lucene.Net/Search/ReferenceManager.cs | 4 +-
src/Lucene.Net/Store/ByteBufferIndexInput.cs | 2 +
src/Lucene.Net/Store/NIOFSDirectory.cs | 13 ++---
src/Lucene.Net/Store/SimpleFSDirectory.cs | 4 +-
src/Lucene.Net/Util/BroadWord.cs | 3 +-
src/Lucene.Net/Util/Fst/FST.cs | 5 +-
src/Lucene.Net/Util/Fst/NodeHash.cs | 3 +-
src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs | 3 +-
src/Lucene.Net/Util/Packed/PackedDataInput.cs | 3 +-
src/Lucene.Net/Util/Packed/PackedInts.cs | 11 ++---
src/Lucene.Net/Util/Packed/PackedWriter.cs | 3 +-
src/Lucene.Net/Util/PagedBytes.cs | 3 +-
src/Lucene.Net/Util/RamUsageEstimator.cs | 2 +-
src/Lucene.Net/Util/RollingBuffer.cs | 2 +
53 files changed, 190 insertions(+), 217 deletions(-)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
index 420822b..bd3fa5b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
@@ -114,7 +114,8 @@ namespace Lucene.Net.Analysis.CharFilters
}
int offset = offsets[(size == 0) ? 0 : size - 1];
- Debugging.Assert(() => size == 0 || off >= offset, () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs));
+ Debugging.Assert(() => size == 0 || off >= offset,
+ () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs));
if (size == 0 || off != offsets[size - 1])
{
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
index 942e6d5..f1b4d6a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
@@ -1,6 +1,7 @@
using J2N.Collections.Generic.Extensions;
using J2N.Text;
using Lucene.Net.Analysis.Util;
+using Lucene.Net.Diagnostics;
using Lucene.Net.Util;
using System;
using System.Collections.Generic;
@@ -304,7 +305,7 @@ namespace Lucene.Net.Analysis.Pt
{
throw new Exception("Illegal Step header specified at line " /*+ r.LineNumber*/); // TODO Line number
}
- //Debugging.Assert(headerPattern.GetGroupNumbers().Length == 4);
+ //Debugging.Assert(() => headerPattern.GetGroupNumbers().Length == 4); // Not possible to read the number of groups that matched in .NET
string name = matcher.Groups[1].Value;
int min = int.Parse(matcher.Groups[2].Value, CultureInfo.InvariantCulture);
int type = int.Parse(matcher.Groups[3].Value, CultureInfo.InvariantCulture);
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
index 1b53e2d..151afe8 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
@@ -85,10 +85,8 @@ namespace Lucene.Net.Codecs.SimpleText
ReadLine();
Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString());
- var dvType =
- (DocValuesType)
- Enum.Parse(typeof (DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE));
-
+ var dvType = (DocValuesType)Enum.Parse(typeof(DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE));
+ // Debugging.Assert(() => dvType != null); // LUCENENET: Not possible for an enum to be null in .NET
if (dvType == DocValuesType.NUMERIC)
{
ReadLine();
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
index 04f6522..a4432a8 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
@@ -159,6 +159,7 @@ namespace Lucene.Net.Codecs.SimpleText
public override BytesRef Next()
{
+ //Debugging.Assert(() => !ended); // LUCENENET: Ended field is never set, so this can never fail
var result = _fstEnum.Next();
if (result == null) return null;
@@ -317,7 +318,7 @@ namespace Lucene.Net.Codecs.SimpleText
Debugging.Assert(
() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) ||
// LUCENENET TODO: This assert fails sometimes, which in turns causes _scratch.Utf8ToString() to throw an index out of range exception
- StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END) /*, "scratch=" + _scratch.Utf8ToString()*/);
+ StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END), () => "scratch=" + _scratch.Utf8ToString());
if (!first && (_liveDocs == null || _liveDocs.Get(_docId)))
{
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
index 778219e..95abd7f 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
@@ -540,18 +540,8 @@ namespace Lucene.Net.Codecs.SimpleText
public override int NextPosition()
{
- //Debugging.Assert((_positions != null && _nextPos < _positions.Length) ||
- // _startOffsets != null && _nextPos < _startOffsets.Length);
-
- // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
- // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
- // part that is checking for an error after reading to the end of the enumerator.
-
- // Since there is no way to turn on assertions in a release build in .NET, we are throwing an InvalidOperationException
- // in this case, which matches the behavior of Lucene 8. See #267.
-
- if (((_positions != null && _nextPos < _positions.Length) || _startOffsets != null && _nextPos < _startOffsets.Length) == false)
- throw new InvalidOperationException("Read past last position");
+ Debugging.Assert(() => (_positions != null && _nextPos < _positions.Length) ||
+ _startOffsets != null && _nextPos < _startOffsets.Length);
if (_positions != null)
{
diff --git a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
index c870dda..4bdfe89 100644
--- a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
@@ -305,9 +305,9 @@ namespace Lucene.Net.Index
while (allActiveThreads.MoveNext())
{
ThreadState next = allActiveThreads.Current;
- if (next.DocumentsWriterPerThread != null)
+ if (next.dwpt != null)
{
- bytesUsed += next.DocumentsWriterPerThread.BytesUsed;
+ bytesUsed += next.dwpt.BytesUsed;
}
}
Assert.AreEqual(bytesUsed, flushControl.ActiveBytes);
diff --git a/src/Lucene.Net/Analysis/Tokenizer.cs b/src/Lucene.Net/Analysis/Tokenizer.cs
index 25a136a..4edf011 100644
--- a/src/Lucene.Net/Analysis/Tokenizer.cs
+++ b/src/Lucene.Net/Analysis/Tokenizer.cs
@@ -1,5 +1,5 @@
+using Lucene.Net.Diagnostics;
using System;
-using System.Diagnostics;
using System.IO;
namespace Lucene.Net.Analysis
@@ -115,7 +115,7 @@ namespace Lucene.Net.Analysis
throw new InvalidOperationException("TokenStream contract violation: Close() call missing");
}
this.inputPending = input;
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) SetReaderTestPoint();
+ Debugging.Assert(SetReaderTestPoint);
}
public override void Reset()
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
index 2be0607..30064e4 100644
--- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
+++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
@@ -4,7 +4,6 @@ using Lucene.Net.Support;
using Lucene.Net.Util.Fst;
using System;
using System.Collections.Generic;
-using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Text;
using JCG = J2N.Collections.Generic;
@@ -29,7 +28,6 @@ namespace Lucene.Net.Codecs
*/
using ArrayUtil = Lucene.Net.Util.ArrayUtil;
- using IBits = Lucene.Net.Util.IBits;
using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput;
using ByteSequenceOutputs = Lucene.Net.Util.Fst.ByteSequenceOutputs;
using BytesRef = Lucene.Net.Util.BytesRef;
@@ -40,6 +38,7 @@ namespace Lucene.Net.Codecs
using DocsEnum = Lucene.Net.Index.DocsEnum;
using FieldInfo = Lucene.Net.Index.FieldInfo;
using FieldInfos = Lucene.Net.Index.FieldInfos;
+ using IBits = Lucene.Net.Util.IBits;
using IndexFileNames = Lucene.Net.Index.IndexFileNames;
using IndexInput = Lucene.Net.Store.IndexInput;
using IndexOptions = Lucene.Net.Index.IndexOptions;
@@ -2467,7 +2466,7 @@ namespace Lucene.Net.Codecs
if (currentFrame.ord == 0)
{
//if (DEBUG) System.out.println(" return null");
- Debugging.Assert(() => SetEOF());
+ Debugging.Assert(SetEOF);
term.Length = 0;
validIndexPrefix = 0;
currentFrame.Rewind();
@@ -2579,7 +2578,7 @@ namespace Lucene.Net.Codecs
// if (DEBUG) {
// System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState);
// }
- Debugging.Assert(() => ClearEOF());
+ Debugging.Assert(ClearEOF);
if (target.CompareTo(term) != 0 || !termExists)
{
Debugging.Assert(() => otherState != null && otherState is BlockTermState);
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
index 424bd4e..2e8fc86 100644
--- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
+++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
@@ -479,8 +479,7 @@ namespace Lucene.Net.Codecs
public void CompileIndex(IList<PendingBlock> floorBlocks, RAMOutputStream scratchBytes)
{
// LUCENENET specific - we use a custom wrapper function to display floorBlocks, since
- // it might contain garbage that cannot be converted into text. This is compiled out
- // of the relese, though.
+ // it might contain garbage that cannot be converted into text.
Debugging.Assert(
() => (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null),
() => "isFloor=" + IsFloor + " floorBlocks=" + ToString(floorBlocks));
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
index 5ca2019..fe3e226 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
@@ -261,6 +261,8 @@ namespace Lucene.Net.Codecs.Compressing
public override void Compress(byte[] bytes, int off, int len, DataOutput output)
{
+ // LUCENENET specific - since DeflateStream works a bit differently than Java's Deflate class,
+ // we are unable to assert the total count
byte[] resultArray = null;
using (MemoryStream compressionMemoryStream = new MemoryStream())
{
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
index 7f2f5a6..4822cd5 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
@@ -737,16 +737,7 @@ namespace Lucene.Net.Codecs.Lucene3x
public override int NextPosition()
{
- //Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
-
- // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
- // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
- // part that is checking for an error after reading to the end of the enumerator.
-
- // Since there is no way to turn on assertions in a release build in .NET, we are throwing an InvalidOperationException
- // in this case, which matches the behavior of Lucene 8. See #267.
- if (((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length) == false)
- throw new InvalidOperationException("Read past last position");
+ Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
if (positions != null)
{
diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
index cd77fbc..82adf13 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
@@ -294,7 +294,7 @@ namespace Lucene.Net.Codecs.Lucene3x
else
{
Debugging.Assert(() => SameTermInfo(ti, tiOrd, enumerator));
- Debugging.Assert(() => enumerator.position == tiOrd.termOrd);
+ Debugging.Assert(() => (int)enumerator.position == tiOrd.termOrd);
}
}
}
diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
index e8265ce..f9409ae 100644
--- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
@@ -275,8 +275,7 @@ namespace Lucene.Net.Codecs.Lucene40
WriteBits(output);
}
CodecUtil.WriteFooter(output);
- bool verified = VerifyCount();
- Debugging.Assert(() => verified);
+ Debugging.Assert(VerifyCount);
}
finally
{
@@ -475,8 +474,7 @@ namespace Lucene.Net.Codecs.Lucene40
Debugging.Assert(() => count != -1);
int countSav = count;
count = -1;
- bool checkCount = countSav == Count();
- Debugging.Assert(() => checkCount, () => "saved count was " + countSav + " but recomputed count is " + count);
+ Debugging.Assert(() => countSav == Count(), () => "saved count was " + countSav + " but recomputed count is " + count);
return true;
}
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
index ceca2e2..8251d2e 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
@@ -730,16 +730,7 @@ namespace Lucene.Net.Codecs.Lucene40
public override int NextPosition()
{
- //Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
-
- // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
- // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
- // part that is checking for an error after reading to the end of the enumerator.
-
- // Since there is no way to turn on assertions in a release build in .NET, we are throwing an InvalidOperationException
- // in this case, which matches the behavior of Lucene 8. See #267.
- if (((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length) == false)
- throw new InvalidOperationException("Read past last position");
+ Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
if (positions != null)
{
diff --git a/src/Lucene.Net/Index/CheckIndex.cs b/src/Lucene.Net/Index/CheckIndex.cs
index 2d58581..e2e30fd 100644
--- a/src/Lucene.Net/Index/CheckIndex.cs
+++ b/src/Lucene.Net/Index/CheckIndex.cs
@@ -1235,13 +1235,14 @@ namespace Lucene.Net.Index
}
lastPos = pos;
BytesRef payload = postings.GetPayload();
- if (payload != null)
+ // LUCENENET specific - restructured to reduce number of checks in production
+ if (!(payload is null))
{
Debugging.Assert(payload.IsValid);
- }
- if (payload != null && payload.Length < 1)
- {
- throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " payload length is out of bounds " + payload.Length);
+ if (payload.Length < 1)
+ {
+ throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " payload length is out of bounds " + payload.Length);
+ }
}
if (hasOffsets)
{
@@ -2356,19 +2357,20 @@ namespace Lucene.Net.Index
result.NewSegments.Commit(result.Dir);
}
- private static bool assertsOn;
+ // LUCENENET: Not used
+ //private static bool assertsOn;
- private static bool TestAsserts()
- {
- assertsOn = true;
- return true;
- }
+ //private static bool TestAsserts()
+ //{
+ // assertsOn = true;
+ // return true;
+ //}
- private static bool AssertsOn()
- {
- Debugging.Assert(TestAsserts);
- return assertsOn;
- }
+ //private static bool AssertsOn()
+ //{
+ // Debugging.Assert(TestAsserts);
+ // return assertsOn;
+ //}
///// Command-line interface to check and fix an index.
/////
@@ -2473,12 +2475,14 @@ namespace Lucene.Net.Index
//Environment.Exit(1);
}
- // LUCENENET specific - doesn't apply
+ // LUCENENET specific - rather than having the user specify whether to enable asserts, we always run with them enabled.
+ Debugging.AssertsEnabled = true;
//if (!AssertsOn())
//{
// Console.WriteLine("\nNOTE: testing will be more thorough if you run java with '-ea:org.apache.lucene...', so assertions are enabled");
//}
+
if (onlySegments.Count == 0)
{
onlySegments = null;
diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
index f394b4f..c6d0913 100644
--- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
@@ -1,5 +1,6 @@
#if FEATURE_CONCURRENTMERGESCHEDULER
using J2N.Threading;
+using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
@@ -394,7 +395,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(!Thread.holdsLock(writer));
+ Debugging.Assert(() => !Monitor.IsEntered(writer));
this.m_writer = writer;
diff --git a/src/Lucene.Net/Index/DirectoryReader.cs b/src/Lucene.Net/Index/DirectoryReader.cs
index f34c2e5..4bb5997 100644
--- a/src/Lucene.Net/Index/DirectoryReader.cs
+++ b/src/Lucene.Net/Index/DirectoryReader.cs
@@ -1,7 +1,6 @@
using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
-using System.Diagnostics;
using System.IO;
namespace Lucene.Net.Index
diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs
index 2cdce5e..177b450 100644
--- a/src/Lucene.Net/Index/DocFieldProcessor.cs
+++ b/src/Lucene.Net/Index/DocFieldProcessor.cs
@@ -2,7 +2,6 @@ using J2N.Text;
using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
-using System.Diagnostics;
using System.Runtime.CompilerServices;
using JCG = J2N.Collections.Generic;
diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs
index eaa41ca..79121c4 100644
--- a/src/Lucene.Net/Index/DocTermOrds.cs
+++ b/src/Lucene.Net/Index/DocTermOrds.cs
@@ -1,7 +1,6 @@
using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
-using System.Diagnostics;
using System.IO;
namespace Lucene.Net.Index
@@ -23,9 +22,9 @@ namespace Lucene.Net.Index
* limitations under the License.
*/
- using IBits = Lucene.Net.Util.IBits;
using BytesRef = Lucene.Net.Util.BytesRef;
using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+ using IBits = Lucene.Net.Util.IBits;
using PagedBytes = Lucene.Net.Util.PagedBytes;
using PostingsFormat = Lucene.Net.Codecs.PostingsFormat; // javadocs
using SeekStatus = Lucene.Net.Index.TermsEnum.SeekStatus;
diff --git a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs
index b2eaf0a..983914a 100644
--- a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs
+++ b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs
@@ -114,14 +114,14 @@ namespace Lucene.Net.Index
{
case DocValuesFieldUpdatesType.NUMERIC:
NumericDocValuesFieldUpdates numericUpdates;
- Debugging.Assert(() => !numericDVUpdates.TryGetValue(field, out numericUpdates));
+ Debugging.Assert(() => !numericDVUpdates.ContainsKey(field));
numericUpdates = new NumericDocValuesFieldUpdates(field, maxDoc);
numericDVUpdates[field] = numericUpdates;
return numericUpdates;
case DocValuesFieldUpdatesType.BINARY:
BinaryDocValuesFieldUpdates binaryUpdates;
- Debugging.Assert(() => !binaryDVUpdates.TryGetValue(field, out binaryUpdates));
+ Debugging.Assert(() => !binaryDVUpdates.ContainsKey(field));
binaryUpdates = new BinaryDocValuesFieldUpdates(field, maxDoc);
binaryDVUpdates[field] = binaryUpdates;
return binaryUpdates;
diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs
index 7264902..4707b40 100644
--- a/src/Lucene.Net/Index/DocumentsWriter.cs
+++ b/src/Lucene.Net/Index/DocumentsWriter.cs
@@ -3,10 +3,8 @@ using Lucene.Net.Diagnostics;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
-using System.Diagnostics;
-using System.Threading;
-using System.Reflection;
using System.Runtime.CompilerServices;
+using System.Threading;
using JCG = J2N.Collections.Generic;
namespace Lucene.Net.Index
@@ -32,8 +30,8 @@ namespace Lucene.Net.Index
using BinaryDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.BinaryDocValuesUpdate;
using BytesRef = Lucene.Net.Util.BytesRef;
using Directory = Lucene.Net.Store.Directory;
- using IEvent = Lucene.Net.Index.IndexWriter.IEvent;
using FlushedSegment = Lucene.Net.Index.DocumentsWriterPerThread.FlushedSegment;
+ using IEvent = Lucene.Net.Index.IndexWriter.IEvent;
using InfoStream = Lucene.Net.Util.InfoStream;
using NumericDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.NumericDocValuesUpdate;
using Query = Lucene.Net.Search.Query;
@@ -246,7 +244,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(!Thread.HoldsLock(writer), "IndexWriter lock should never be hold when aborting");
+ Debugging.Assert(() => !Monitor.IsEntered(writer), () => "IndexWriter lock should never be hold when aborting");
bool success = false;
JCG.HashSet<string> newFilesSet = new JCG.HashSet<string>();
try
@@ -289,7 +287,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(indexWriter.HoldsFullFlushLock());
+ Debugging.Assert(() => indexWriter.HoldsFullFlushLock);
if (infoStream.IsEnabled("DW"))
{
infoStream.Message("DW", "lockAndAbortAll");
@@ -329,7 +327,7 @@ namespace Lucene.Net.Index
private void AbortThreadState(ThreadState perThread, ISet<string> newFiles)
{
- //Debugging.Assert(perThread.HeldByCurrentThread);
+ Debugging.Assert(() => perThread.IsHeldByCurrentThread);
if (perThread.IsActive) // we might be closed
{
if (perThread.IsInitialized)
@@ -360,7 +358,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(indexWriter.HoldsFullFlushLock());
+ Debugging.Assert(() => indexWriter.HoldsFullFlushLock);
if (infoStream.IsEnabled("DW"))
{
infoStream.Message("DW", "unlockAll");
@@ -371,10 +369,10 @@ namespace Lucene.Net.Index
try
{
ThreadState perThread = perThreadPool.GetThreadState(i);
- //if (perThread.HeldByCurrentThread)
- //{
- perThread.Unlock();
- //}
+ if (perThread.IsHeldByCurrentThread)
+ {
+ perThread.Unlock();
+ }
}
catch (Exception e)
{
diff --git a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs
index 0eee1e0..b79a7be 100644
--- a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs
@@ -2,7 +2,6 @@ using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using Lucene.Net.Support.Threading;
using System;
-using System.Diagnostics;
using System.Threading;
namespace Lucene.Net.Index
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
index 527413e..20582bb 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
@@ -3,7 +3,6 @@ using J2N.Threading.Atomic;
using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
-using System.Diagnostics;
using System.Threading;
using JCG = J2N.Collections.Generic;
@@ -153,7 +152,11 @@ namespace Lucene.Net.Index
* fail. To prevent this we only assert if the the largest document seen
* is smaller than the 1/2 of the maxRamBufferMB
*/
- Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes + ", peakDelta mem: " + peakDelta + " byte");
+ Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected
+ + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes
+ + ", pending DWPT: " + numPending + ", flushing DWPT: "
+ + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes
+ + ", peakDelta mem: " + peakDelta + " byte");
}
}
return true;
@@ -290,7 +293,7 @@ namespace Lucene.Net.Index
private bool UpdateStallState()
{
- //Debugging.Assert(Thread.holdsLock(this));
+ Debugging.Assert(() => Monitor.IsEntered(this));
long limit = StallLimitBytes;
/*
* we block indexing threads if net byte grows due to slow flushes
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs
index 67350ce..49f535b 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs
@@ -2,7 +2,7 @@ using J2N.Threading.Atomic;
using Lucene.Net.Diagnostics;
using Lucene.Net.Support.Threading;
using System.Collections.Generic;
-using System.Diagnostics;
+using System.Threading;
namespace Lucene.Net.Index
{
@@ -128,7 +128,7 @@ namespace Lucene.Net.Index
private int InnerPurge(IndexWriter writer)
{
- //Debugging.Assert(PurgeLock.HeldByCurrentThread);
+ Debugging.Assert(() => purgeLock.IsHeldByCurrentThread);
int numPurged = 0;
while (true)
{
@@ -173,8 +173,8 @@ namespace Lucene.Net.Index
internal virtual int ForcePurge(IndexWriter writer)
{
- //Debugging.Assert(!Thread.HoldsLock(this));
- //Debugging.Assert(!Thread.holdsLock(writer));
+ Debugging.Assert(() => !Monitor.IsEntered(this));
+ Debugging.Assert(() => !Monitor.IsEntered(writer));
purgeLock.@Lock();
try
{
@@ -188,8 +188,8 @@ namespace Lucene.Net.Index
internal virtual int TryPurge(IndexWriter writer)
{
- //Debugging.Assert(!Thread.holdsLock(this));
- //Debugging.Assert(!Thread.holdsLock(writer));
+ Debugging.Assert(() => !Monitor.IsEntered(this));
+ Debugging.Assert(() => !Monitor.IsEntered(writer));
if (purgeLock.TryLock())
{
try
diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs
index b96f063..ef4deb7 100644
--- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs
@@ -80,14 +80,14 @@ namespace Lucene.Net.Index
/// <seealso cref="IsActive"/>
internal void Deactivate() // LUCENENET NOTE: Made internal because it is called outside of this context
{
- //Debugging.Assert(this.HeldByCurrentThread);
+ Debugging.Assert(() => this.IsHeldByCurrentThread);
isActive = false;
Reset();
}
internal void Reset() // LUCENENET NOTE: Made internal because it is called outside of this context
{
- //Debugging.Assert(this.HeldByCurrentThread);
+ Debugging.Assert(() => this.IsHeldByCurrentThread);
this.dwpt = null;
this.bytesUsed = 0;
this.flushPending = false;
@@ -98,30 +98,52 @@ namespace Lucene.Net.Index
/// only return <c>false</c> iff the DW has been disposed and this
/// <see cref="ThreadState"/> is already checked out for flush.
/// </summary>
- internal bool IsActive =>
- //Debugging.Assert(this.HeldByCurrentThread);
- isActive;
+ internal bool IsActive
+ {
+ get
+ {
+ Debugging.Assert(() => this.IsHeldByCurrentThread);
+ return isActive;
+ }
+
+ }
- internal bool IsInitialized =>
- //Debugging.Assert(this.HeldByCurrentThread);
- IsActive && dwpt != null;
+ internal bool IsInitialized
+ {
+ get
+ {
+ Debugging.Assert(() => this.IsHeldByCurrentThread);
+ return IsActive && dwpt != null;
+ }
+ }
+
/// <summary>
/// Returns the number of currently active bytes in this ThreadState's
/// <see cref="DocumentsWriterPerThread"/>
/// </summary>
- public long BytesUsedPerThread =>
- //Debugging.Assert(this.HeldByCurrentThread);
- // public for FlushPolicy
- bytesUsed;
+ public long BytesUsedPerThread
+ {
+ get
+ {
+ Debugging.Assert(() => this.IsHeldByCurrentThread);
+ // public for FlushPolicy
+ return bytesUsed;
+ }
+ }
/// <summary>
/// Returns this <see cref="ThreadState"/>s <see cref="DocumentsWriterPerThread"/>
/// </summary>
- public DocumentsWriterPerThread DocumentsWriterPerThread =>
- //Debugging.Assert(this.HeldByCurrentThread);
- // public for FlushPolicy
- dwpt;
+ public DocumentsWriterPerThread DocumentsWriterPerThread
+ {
+ get
+ {
+ Debugging.Assert(() => this.IsHeldByCurrentThread);
+ // public for FlushPolicy
+ return dwpt;
+ }
+ }
/// <summary>
/// Returns <c>true</c> iff this <see cref="ThreadState"/> is marked as flush
@@ -270,7 +292,7 @@ namespace Lucene.Net.Index
internal virtual DocumentsWriterPerThread Reset(ThreadState threadState, bool closed)
{
- //Debugging.Assert(threadState.HeldByCurrentThread);
+ Debugging.Assert(() => threadState.IsHeldByCurrentThread);
DocumentsWriterPerThread dwpt = threadState.dwpt;
if (!closed)
{
diff --git a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
index 11c1f1b..ab3bb0d 100644
--- a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
@@ -85,13 +85,13 @@ namespace Lucene.Net.Index
// try
// {
//#endif
- // make sure not to run IncWaiters / DecrWaiters in Debug.Assert as that gets
- // removed at compile time if built in Release mode
+ // LUCENENET: make sure not to run IncWaiters / DecrWaiters in Debugging.Assert as that gets
+ // disabled in production
var result = IncWaiters();
- Debugging.Assert(() => result);
- Monitor.Wait(this);
- result = DecrWaiters();
- Debugging.Assert(() => result);
+ Debugging.Assert(() => result);
+ Monitor.Wait(this);
+ result = DecrWaiters();
+ Debugging.Assert(() => result);
//#if !NETSTANDARD1_6 // LUCENENET NOTE: Senseless to catch and rethrow the same exception type
// }
// catch (ThreadInterruptedException e)
@@ -112,8 +112,7 @@ namespace Lucene.Net.Index
private bool IncWaiters()
{
numWaiting++;
- bool existed = waiting.ContainsKey(ThreadJob.CurrentThread);
- Debugging.Assert(() => !existed);
+ Debugging.Assert(() => !waiting.ContainsKey(ThreadJob.CurrentThread));
waiting[ThreadJob.CurrentThread] = true;
return numWaiting > 0;
diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
index 8e961a7..e5757ea 100644
--- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
+++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
@@ -197,8 +197,7 @@ namespace Lucene.Net.Index
{
// First time we're seeing this term since the last
// flush
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("FreqProxTermsWriterPerField.newTerm start");
+ Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.newTerm start"));
FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray;
postings.lastDocIDs[termID] = docState.docID;
diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs
index 6b38589..df29f18 100644
--- a/src/Lucene.Net/Index/IndexFileDeleter.cs
+++ b/src/Lucene.Net/Index/IndexFileDeleter.cs
@@ -532,7 +532,7 @@ namespace Lucene.Net.Index
{
Debugging.Assert(() => IsLocked);
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
long t0 = 0;
if (infoStream.IsEnabled("IFD"))
{
@@ -650,7 +650,7 @@ namespace Lucene.Net.Index
{
Debugging.Assert(() => IsLocked);
// LUCENENET: Using TryGetValue to eliminate extra lookup
- return refCounts.TryGetValue(fileName, out RefCount value) ? value.count > 0 : false;
+ return refCounts.TryGetValue(fileName, out RefCount value) && value.count > 0;
}
private RefCount GetRefCount(string fileName)
@@ -724,7 +724,7 @@ namespace Lucene.Net.Index
// the file is open in another process, and queue
// the file for subsequent deletion.
- //Debugging.Assert(e.Message.Contains("cannot delete"));
+ //Debugging.Assert(() => e.Message.Contains("cannot delete"));
if (infoStream.IsEnabled("IFD"))
{
diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs
index 0c23c0c..8d3d47f 100644
--- a/src/Lucene.Net/Index/IndexWriter.cs
+++ b/src/Lucene.Net/Index/IndexWriter.cs
@@ -6,7 +6,6 @@ using Lucene.Net.Support;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
-using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
@@ -708,7 +707,7 @@ namespace Lucene.Net.Index
}
else
{
- Debugging.Assert(() => rld.Info == info, () => "Infos are not equal");//, "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info));
+ Debugging.Assert(() => rld.Info == info, () => "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info));
}
if (create)
@@ -2570,8 +2569,7 @@ namespace Lucene.Net.Index
infoStream.Message("IW", "rollback: infos=" + SegString(segmentInfos.Segments));
}
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("rollback before checkpoint");
+ Debugging.Assert(() => TestPoint("rollback before checkpoint"));
// Ask deleter to locate unreferenced files & remove
// them:
@@ -3816,12 +3814,8 @@ namespace Lucene.Net.Index
/// </summary>
private readonly object fullFlushLock = new object();
- // LUCENENET NOTE: Not possible in .NET
- //// for assert
- //internal virtual bool HoldsFullFlushLock()
- //{
- // return Thread.holdsLock(FullFlushLock);
- //}
+ // for assert
+ internal virtual bool HoldsFullFlushLock => Monitor.IsEntered(fullFlushLock);
/// <summary>
/// Flush all in-memory buffered updates (adds and deletes)
@@ -3978,16 +3972,7 @@ namespace Lucene.Net.Index
// for testing only
internal virtual DocumentsWriter DocsWriter
- {
- get
- {
- bool test = false;
- // LUCENENET NOTE: Must set test outside of Debug.Assert!!
- bool isTest = test = true;
- Debugging.Assert(() => isTest);
- return test ? docWriter : null;
- }
- }
+ => Debugging.AssertsEnabled ? docWriter : null; // LUCENENET specific - just read the status, simpler than using Assert() to set a local variable
/// <summary>
/// Expert: Return the number of documents currently
@@ -4098,8 +4083,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("startCommitMergeDeletes");
+ Debugging.Assert(() => TestPoint("startCommitMergeDeletes"));
IList<SegmentCommitInfo> sourceSegments = merge.Segments;
@@ -4335,8 +4319,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("startCommitMerge");
+ Debugging.Assert(() => TestPoint("startCommitMerge"));
if (hitOOM)
{
@@ -5511,8 +5494,7 @@ namespace Lucene.Net.Index
infoStream.Message("IW", "done all syncs: " + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", filesToSync));
}
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("midStartCommitSuccess");
+ Debugging.Assert(() => TestPoint("midStartCommitSuccess"));
}
finally
{
@@ -5542,8 +5524,7 @@ namespace Lucene.Net.Index
{
HandleOOM(oom, "startCommit");
}
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("finishStartCommit");
+ Debugging.Assert(() => TestPoint("finishStartCommit"));
}
/// <summary>
diff --git a/src/Lucene.Net/Index/PrefixCodedTerms.cs b/src/Lucene.Net/Index/PrefixCodedTerms.cs
index b914b5f..a0f03b9 100644
--- a/src/Lucene.Net/Index/PrefixCodedTerms.cs
+++ b/src/Lucene.Net/Index/PrefixCodedTerms.cs
@@ -92,6 +92,9 @@ namespace Lucene.Net.Index
public virtual bool MoveNext()
{
+ // LUCENENET specific - Since there is no way to check for a next element
+ // without calling this method in .NET, the assert is redundant and ineffective.
+ //Debugging.Assert(() => input.GetFilePointer() < input.Length); // Has next
if (input.GetFilePointer() < input.Length)
{
try
diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs
index 976d412..7f8b360 100644
--- a/src/Lucene.Net/Index/ReadersAndUpdates.cs
+++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Text;
+using System.Threading;
namespace Lucene.Net.Index
{
@@ -230,7 +231,7 @@ namespace Lucene.Net.Index
lock (this)
{
Debugging.Assert(() => liveDocs != null);
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
Debugging.Assert(() => docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount);
Debugging.Assert(() => !liveDocsShared);
bool didDelete = liveDocs.Get(docID);
@@ -318,7 +319,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
Debugging.Assert(() => Info.Info.DocCount > 0);
//System.out.println("initWritableLivedocs seg=" + info + " liveDocs=" + liveDocs + " shared=" + shared);
if (liveDocsShared)
@@ -348,7 +349,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
return liveDocs;
}
}
@@ -359,7 +360,7 @@ namespace Lucene.Net.Index
lock (this)
{
//System.out.println("getROLiveDocs seg=" + info);
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
liveDocsShared = true;
//if (liveDocs != null) {
//System.out.println(" liveCount=" + liveDocs.count());
@@ -393,7 +394,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
//System.out.println("rld.writeLiveDocs seg=" + info + " pendingDelCount=" + pendingDeleteCount + " numericUpdates=" + numericUpdates);
if (pendingDeleteCount == 0)
{
@@ -458,7 +459,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
//System.out.println("rld.writeFieldUpdates: seg=" + info + " numericFieldUpdates=" + numericFieldUpdates);
Debugging.Assert(dvUpdates.Any);
@@ -692,6 +693,7 @@ namespace Lucene.Net.Index
}
else
{ // no update for this document
+ Debugging.Assert(() => curDoc < updateDoc);
if (currentValues != null && DocsWithField.Get(curDoc))
{
// only read the current value if the document had a value before
@@ -724,6 +726,7 @@ namespace Lucene.Net.Index
}
else
{ // no update for this document
+ Debugging.Assert(() => curDoc < updateDoc);
if (currentValues != null && DocsWithField.Get(curDoc))
{
// only read the current value if the document had a value before
@@ -746,7 +749,7 @@ namespace Lucene.Net.Index
{
lock (this)
{
- //Debugging.Assert(Thread.holdsLock(Writer));
+ Debugging.Assert(() => Monitor.IsEntered(writer));
// must execute these two statements as atomic operation, otherwise we
// could lose updates if e.g. another thread calls writeFieldUpdates in
// between, or the updates are applied to the obtained reader, but then
diff --git a/src/Lucene.Net/Index/SortedDocValuesWriter.cs b/src/Lucene.Net/Index/SortedDocValuesWriter.cs
index 17072c5..08c7985 100644
--- a/src/Lucene.Net/Index/SortedDocValuesWriter.cs
+++ b/src/Lucene.Net/Index/SortedDocValuesWriter.cs
@@ -148,6 +148,7 @@ namespace Lucene.Net.Index
private IEnumerable<long?> GetOrdsEnumberable(int maxDoc, int[] ordMap)
{
AppendingDeltaPackedInt64Buffer.Iterator iter = pending.GetIterator();
+ Debugging.Assert(() => pending.Count == maxDoc);
for (int i = 0; i < maxDoc; ++i)
{
diff --git a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs
index a85d348..3a4460a 100644
--- a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs
+++ b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs
@@ -203,7 +203,7 @@ namespace Lucene.Net.Index
{
AppendingDeltaPackedInt64Buffer.Iterator iter = pendingCounts.GetIterator();
- Debugging.Assert(() => maxDoc == pendingCounts.Count, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count);
+ Debugging.Assert(() => pendingCounts.Count == maxDoc, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count);
for (int i = 0; i < maxDoc; ++i)
{
diff --git a/src/Lucene.Net/Index/StoredFieldsProcessor.cs b/src/Lucene.Net/Index/StoredFieldsProcessor.cs
index aac5b12..fb75321 100644
--- a/src/Lucene.Net/Index/StoredFieldsProcessor.cs
+++ b/src/Lucene.Net/Index/StoredFieldsProcessor.cs
@@ -1,6 +1,6 @@
+using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using System;
-using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Lucene.Net.Index
@@ -139,8 +139,7 @@ namespace Lucene.Net.Index
[MethodImpl(MethodImplOptions.NoInlining)]
internal override void FinishDocument()
{
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("StoredFieldsWriter.finishDocument start");
+ Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument start"));
InitFieldsWriter(IOContext.DEFAULT);
Fill(docState.docID);
@@ -157,8 +156,7 @@ namespace Lucene.Net.Index
}
Reset();
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("StoredFieldsWriter.finishDocument end");
+ Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument end"));
}
public override void AddField(int docID, IIndexableField field, FieldInfo fieldInfo)
@@ -181,8 +179,7 @@ namespace Lucene.Net.Index
fieldInfos[numStoredFields] = fieldInfo;
numStoredFields++;
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField");
+ Debugging.Assert(() => docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField"));
}
}
}
diff --git a/src/Lucene.Net/Index/TermVectorsConsumer.cs b/src/Lucene.Net/Index/TermVectorsConsumer.cs
index 94c48e7..30fb1da 100644
--- a/src/Lucene.Net/Index/TermVectorsConsumer.cs
+++ b/src/Lucene.Net/Index/TermVectorsConsumer.cs
@@ -114,8 +114,7 @@ namespace Lucene.Net.Index
[MethodImpl(MethodImplOptions.NoInlining)]
internal override void FinishDocument(TermsHash termsHash)
{
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start");
+ Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start"));
if (!hasVectors)
{
diff --git a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs
index f8de489..032bb04 100644
--- a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs
+++ b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs
@@ -293,8 +293,7 @@ namespace Lucene.Net.Index
internal override void NewTerm(int termID)
{
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start");
+ Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start"));
TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray;
postings.freqs[termID] = 1;
@@ -306,8 +305,7 @@ namespace Lucene.Net.Index
internal override void AddTerm(int termID)
{
- // LUCENENET: .NET doesn't support asserts in release mode
- if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start");
+ Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start"));
TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray;
postings.freqs[termID]++;
diff --git a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs
index 3327ee9..5b46c72 100644
--- a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs
+++ b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs
@@ -50,9 +50,7 @@ namespace Lucene.Net.Index
public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter)
{
- ThreadState threadState;
- threadBindings.TryGetValue(requestingThread, out threadState);
- if (threadState != null && threadState.TryLock())
+ if (threadBindings.TryGetValue(requestingThread, out ThreadState threadState) && threadState.TryLock())
{
return threadState;
}
@@ -67,7 +65,7 @@ namespace Lucene.Net.Index
ThreadState newState = NewThreadState(); // state is already locked if non-null
if (newState != null)
{
- //Debugging.Assert(newState.HeldByCurrentThread);
+ Debugging.Assert(() => newState.IsHeldByCurrentThread);
threadBindings[requestingThread] = newState;
return newState;
}
diff --git a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs
index 13438e0..de70b6f 100644
--- a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs
+++ b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs
@@ -230,7 +230,8 @@ namespace Lucene.Net.Search
return null; // NOTE(review): stray empty statement removed (was "return null; ;")
}
- //assert inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0;
+ Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0);
+
return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.AtomicReader.MaxDoc, acceptDocs);
}
}
diff --git a/src/Lucene.Net/Search/FieldComparator.cs b/src/Lucene.Net/Search/FieldComparator.cs
index ae30170..9555bee 100644
--- a/src/Lucene.Net/Search/FieldComparator.cs
+++ b/src/Lucene.Net/Search/FieldComparator.cs
@@ -981,7 +981,7 @@ namespace Lucene.Net.Search
// LUCENENET specific special case:
// In case of zero, we may have a "positive 0" or "negative 0"
- // to tie-break. So, we use JCG.Comparer<double> to do the comparison.
+ // to tie-break. So, we use JCG.Comparer<float> to do the comparison.
return JCG.Comparer<float>.Default.Compare(second, first);
}
@@ -992,7 +992,7 @@ namespace Lucene.Net.Search
// LUCENENET specific special case:
// In case of zero, we may have a "positive 0" or "negative 0"
- // to tie-break. So, we use JCG.Comparer<double> to do the comparison.
+ // to tie-break. So, we use JCG.Comparer<float> to do the comparison.
return JCG.Comparer<float>.Default.Compare(docValue, topValue);
}
}
diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs
index 06f6292..d9ab9c8 100644
--- a/src/Lucene.Net/Search/ReferenceManager.cs
+++ b/src/Lucene.Net/Search/ReferenceManager.cs
@@ -200,7 +200,7 @@ namespace Lucene.Net.Search
G newReference = RefreshIfNeeded(reference);
if (newReference != null)
{
- Debugging.Assert(() => (object)newReference != (object)reference, () => "refreshIfNeeded should return null if refresh wasn't needed");
+ Debugging.Assert(() => !ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed");
try
{
SwapReference(newReference);
@@ -311,7 +311,7 @@ namespace Lucene.Net.Search
/// <exception cref="IOException"> If the release operation on the given resource throws an <see cref="IOException"/> </exception>
public void Release(G reference)
{
- Debugging.Assert(() => reference != null);
+ Debugging.Assert(() => !(reference is null));
DecRef(reference);
}
diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs
index 821ce70..8402ad9 100644
--- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs
+++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs
@@ -1,5 +1,6 @@
using J2N.IO;
using Lucene.Net.Diagnostics;
+using Lucene.Net.Util.Fst;
using System;
using System.IO;
using System.Runtime.CompilerServices;
@@ -390,6 +391,7 @@ namespace Lucene.Net.Store
#if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR
foreach (var pair in clones)
{
+ Debugging.Assert(() => pair.Key.isClone);
pair.Key.UnsetBuffers();
}
this.clones.Clear();
diff --git a/src/Lucene.Net/Store/NIOFSDirectory.cs b/src/Lucene.Net/Store/NIOFSDirectory.cs
index bf26d21..8eefa14 100644
--- a/src/Lucene.Net/Store/NIOFSDirectory.cs
+++ b/src/Lucene.Net/Store/NIOFSDirectory.cs
@@ -257,16 +257,9 @@ namespace Lucene.Net.Store
{
while (readLength > 0)
{
- int limit;
- if (readLength > CHUNK_SIZE)
- {
- limit = readOffset + CHUNK_SIZE;
- }
- else
- {
- limit = readOffset + readLength;
- }
- bb.Limit = limit;
+ int toRead = Math.Min(CHUNK_SIZE, readLength);
+ bb.Limit = readOffset + toRead;
+ Debugging.Assert(() => bb.Remaining == toRead);
int i = m_channel.Read(bb, pos);
if (i <= 0) // be defensive here, even though we checked before hand, something could have changed
{
diff --git a/src/Lucene.Net/Store/SimpleFSDirectory.cs b/src/Lucene.Net/Store/SimpleFSDirectory.cs
index 8006f06..35167d9 100644
--- a/src/Lucene.Net/Store/SimpleFSDirectory.cs
+++ b/src/Lucene.Net/Store/SimpleFSDirectory.cs
@@ -1,5 +1,5 @@
+using Lucene.Net.Diagnostics;
using System;
-using System.Diagnostics;
using System.IO;
namespace Lucene.Net.Store
@@ -235,7 +235,7 @@ namespace Lucene.Net.Store
// all we need to do is Read().
total = m_file.Read(b, offset, len);
- //Debugging.Assert(total == len);
+ Debugging.Assert(() => total == len);
}
catch (IOException ioe)
{
diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs
index c70029c..db7241c 100644
--- a/src/Lucene.Net/Util/BroadWord.cs
+++ b/src/Lucene.Net/Util/BroadWord.cs
@@ -1,5 +1,6 @@
using J2N.Numerics;
using Lucene.Net.Diagnostics;
+using System.Globalization;
namespace Lucene.Net.Util
{
@@ -70,7 +71,7 @@ namespace Lucene.Net.Util
long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8
long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0;
- Debugging.Assert(() => 0L <= 1);
+ Debugging.Assert(() => 0L <= l, () => l.ToString(CultureInfo.InvariantCulture)); // was a tautology: compared to literal 1 instead of local l (upstream: assert 0L <= l : l;)
//assert l < 8 : l; //fails when bit r is not available.
// Select bit l from byte (x >>> b):
diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs
index 2eec5f7..86bd528 100644
--- a/src/Lucene.Net/Util/Fst/FST.cs
+++ b/src/Lucene.Net/Util/Fst/FST.cs
@@ -376,8 +376,7 @@ namespace Lucene.Net.Util.Fst
cachedRootArcs = (FST.Arc<T>[])new FST.Arc<T>[0x80];
ReadRootArcs(cachedRootArcs);
- bool set = SetAssertingRootArcs(cachedRootArcs);
- Debugging.Assert(() => set);
+ Debugging.Assert(() => SetAssertingRootArcs(cachedRootArcs));
Debugging.Assert(AssertRootArcs);
}
@@ -409,7 +408,7 @@ namespace Lucene.Net.Util.Fst
}
}
- private bool SetAssertingRootArcs(FST.Arc<T>[] arcs)
+ private bool SetAssertingRootArcs(FST.Arc<T>[] arcs) // Only called from assert
{
assertingCachedRootArcs = (FST.Arc<T>[])new FST.Arc<T>[arcs.Length];
ReadRootArcs(assertingCachedRootArcs);
diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs
index ad402d0..8e6460f 100644
--- a/src/Lucene.Net/Util/Fst/NodeHash.cs
+++ b/src/Lucene.Net/Util/Fst/NodeHash.cs
@@ -162,8 +162,7 @@ namespace Lucene.Net.Util.Fst
// freeze & add
long node = fst.AddNode(nodeIn);
//System.out.println(" now freeze node=" + node);
- long hashNode = Hash(node);
- Debugging.Assert(() => hashNode == h, () => "frozenHash=" + hashNode + " vs h=" + h);
+ Debugging.Assert(() => Hash(node) == h, () => "frozenHash=" + Hash(node) + " vs h=" + h);
count++;
table.Set(pos, node);
// Rehash at 2/3 occupancy:
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
index 1fb241c..470b76e 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using System;
using System.Diagnostics.CodeAnalysis;
+using System.Globalization;
using System.Text;
namespace Lucene.Net.Util.Packed
@@ -219,7 +220,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words()
{
- Debugging.Assert(() => numBits >= 0, numBits.ToString);
+ Debugging.Assert(() => numBits >= 0, () => numBits.ToString(CultureInfo.InvariantCulture));
return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE);
}
diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
index bf09213..89e266a 100644
--- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs
+++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
@@ -1,5 +1,6 @@
using Lucene.Net.Diagnostics;
using System;
+using System.Globalization;
namespace Lucene.Net.Util.Packed
{
@@ -52,7 +53,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public long ReadInt64(int bitsPerValue)
{
- Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, bitsPerValue.ToString);
+ Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
long r = 0;
while (bitsPerValue > 0)
{
diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs
index c9da5f8..fac1a53 100644
--- a/src/Lucene.Net/Util/Packed/PackedInts.cs
+++ b/src/Lucene.Net/Util/Packed/PackedInts.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
+using System.Globalization;
using System.IO;
namespace Lucene.Net.Util.Packed
@@ -141,8 +142,8 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public override float OverheadPerValue(int bitsPerValue)
{
+ Debugging.Assert(() => IsSupported(bitsPerValue));
int valuesPerBlock = 64 / bitsPerValue;
-
int overhead = 64 % bitsPerValue;
return (float)overhead / valuesPerBlock;
}
@@ -204,6 +205,7 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPerValue)
{
+ Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
// assume long-aligned
return 8L * Int64Count(packedIntsVersion, valueCount, bitsPerValue);
}
@@ -216,16 +218,13 @@ namespace Lucene.Net.Util.Packed
/// </summary>
public virtual int Int64Count(int packedIntsVersion, int valueCount, int bitsPerValue)
{
+ Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
long byteCount = ByteCount(packedIntsVersion, valueCount, bitsPerValue);
-
+ Debugging.Assert(() => byteCount < 8L * int.MaxValue);
if ((byteCount % 8) == 0)
- {
return (int)(byteCount / 8);
- }
else
- {
return (int)(byteCount / 8 + 1);
- }
}
/// <summary>
diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs
index aaea243..2df619f 100644
--- a/src/Lucene.Net/Util/Packed/PackedWriter.cs
+++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs
@@ -1,5 +1,6 @@
using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
+using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
@@ -55,7 +56,7 @@ namespace Lucene.Net.Util.Packed
public override void Add(long v)
{
- Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), m_bitsPerValue.ToString);
+ Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), () => m_bitsPerValue.ToString(CultureInfo.InvariantCulture));
Debugging.Assert(() => !finished);
if (m_valueCount != -1 && written >= m_valueCount)
{
diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs
index e80ab68..dd392cb 100644
--- a/src/Lucene.Net/Util/PagedBytes.cs
+++ b/src/Lucene.Net/Util/PagedBytes.cs
@@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
+using System.Globalization;
namespace Lucene.Net.Util
{
@@ -162,7 +163,7 @@ namespace Lucene.Net.Util
/// </summary>
public PagedBytes(int blockBits)
{
- Debugging.Assert(() => blockBits > 0 && blockBits <= 31, blockBits.ToString);
+ Debugging.Assert(() => blockBits > 0 && blockBits <= 31, () => blockBits.ToString(CultureInfo.InvariantCulture));
this.blockSize = 1 << blockBits;
this.blockBits = blockBits;
blockMask = blockSize - 1;
diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs
index 493104b..298009b 100644
--- a/src/Lucene.Net/Util/RamUsageEstimator.cs
+++ b/src/Lucene.Net/Util/RamUsageEstimator.cs
@@ -507,7 +507,7 @@ namespace Lucene.Net.Util
seen.Add(ob);
Type obClazz = ob.GetType();
-
+ // LUCENENET specific - .NET cannot return a null type for an object, so no need to assert it
if (obClazz.Equals(typeof(string)))
{
// LUCENENET specific - we can get a closer estimate of a string
diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs
index c6479ca..7558150 100644
--- a/src/Lucene.Net/Util/RollingBuffer.cs
+++ b/src/Lucene.Net/Util/RollingBuffer.cs
@@ -140,6 +140,8 @@ namespace Lucene.Net.Util
}
Debugging.Assert(() => InBounds(pos));
int index = GetIndex(pos);
+ //System.out.println(" pos=" + pos + " nextPos=" + nextPos + " -> index=" + index);
+ //assert buffer[index].pos == pos;
return buffer[index];
}