Posted to commits@lucenenet.apache.org by ni...@apache.org on 2020/11/10 15:57:21 UTC

[lucenenet] branch master updated: Fixes several code quality issues (Code Analysis) (#387)

This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git


The following commit(s) were added to refs/heads/master by this push:
     new 5eb0949  Fixes several code quality issues (Code Analysis) (#387)
5eb0949 is described below

commit 5eb0949c680039db82aef868659f2b90c79134b9
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Tue Nov 10 22:47:42 2020 +0700

    Fixes several code quality issues (Code Analysis) (#387)
    
    * BUG: Lucene.Net.Document.CompressionTools: Pass the offset and length to the underlying MemoryStream
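
    A minimal sketch of the corrected pattern (illustrative only; the method
    shape and the DeflateStream compressor are assumptions, not the actual
    Lucene.NET source) - the requested slice is forwarded to the MemoryStream
    that feeds the compressor instead of being ignored:

        using System.IO;
        using System.IO.Compression;

        internal static class CompressionSketch
        {
            public static byte[] Compress(byte[] value, int offset, int length)
            {
                // Before: new MemoryStream(value) compressed the whole buffer
                // regardless of the offset/length the caller asked for.
                using var input = new MemoryStream(value, offset, length);
                using var output = new MemoryStream();
                using (var deflate = new DeflateStream(output, CompressionMode.Compress))
                {
                    input.CopyTo(deflate);
                }
                return output.ToArray();
            }
        }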
    
    * BUG: Lucene.Net.Analysis.Common.Tartarus.Snowball.Among: Fixed MethodObject property to reference private field instead of itself
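
    The shape of this fix, sketched (the backing field name is an assumption):
    the getter must return the private field rather than the property itself.

        // MethodInfo is System.Reflection.MethodInfo.
        // Before: the property referenced itself and recursed until StackOverflowException.
        //     public MethodInfo MethodObject => MethodObject;

        // After: return the backing field.
        private readonly MethodInfo methodObject;
        public MethodInfo MethodObject => methodObject;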
    
    * BUG: Lucene.Net.TestFramework.Analysis.MockTokenizer: Pass the AttributeFactory argument that is provided as per the documentation comment. Note this bug exists in Lucene 4.8.0, also.
    
    * TestTargetFramework.props: Disable IDE0052 and IDE0060 in test projects
    
    * lucene-cli: Disabled IDE0060 warnings
    
    * Lucene.Net.Demo: Disabled IDE0060 warnings
    
    * PERFORMANCE: Fixes IDE0052: Remove unread private member; fixes IDE0060: Remove unused parameter
    
    * BUG: Lucene.Net.Util.ExceptionExtensions.GetSuppressedAsList(): Use J2N.Collections.Generic.List<T> so the call to ToString() will automatically list the exception messages
    
    * Lucene.Net.Tests.Support.TestApiConsistency: Added exclusion for System.Runtime.CompilerServices to skip our nullable attribute replacements for platforms that don't support them
    
    * Fixes CA2213 Disposable fields should be disposed (except for IndexWriter and subclasses which need more work)
    
    * Fixes CA1063: Implement IDisposable Properly (except for IndexWriter). Partially addresses #265.
    
    * PERFORMANCE: Lucene.Net.Index: Changed FieldInfos, FreqProxTermsWriterPerField, IndexWriter, LogMergePolicy, SegmentCoreReaders, and SegmentReader to take advantage of the fact that TryGetValue returns a boolean
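
    The TryGetValue changes all follow the same before/after shape; a sketch
    with illustrative names (the dictionary and helper are stand-ins, not the
    actual Lucene.NET members):

        // Before: ContainsKey + indexer = two hash lookups per hit.
        if (byName.ContainsKey(fieldName))
        {
            FieldInfo fi = byName[fieldName];
            Process(fi);
        }

        // After: TryGetValue already reports whether the key was found,
        // so one lookup does both jobs.
        if (byName.TryGetValue(fieldName, out FieldInfo fi))
        {
            Process(fi);
        }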
    
    * PERFORMANCE: Lucene.Net.Index.DocValuesProducer: Optimized checks in AddXXXField() methods
    
    * Fixes IDE0018: Inline variable declaration
    
    * Fixes CA1507: Use nameof in place of string
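
    Typical CA1507 rewrite (parameter name illustrative):

        // Before: the literal can silently drift if the parameter is renamed.
        if (matchVersion == null) throw new ArgumentNullException("matchVersion");

        // After: nameof keeps the argument name checked at compile time.
        if (matchVersion == null) throw new ArgumentNullException(nameof(matchVersion));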
    
    * BUG: Fixed formatting in ArgumentException message for all analyzer factories so it will display the dictionary contents
    
    * Removed pattern matching warning exclusions for IDE0019, IDE0020, and IDE0038
    
    * Fixes IDE0019: Use pattern matching to avoid 'is' check followed by a cast
    
    * Simplified reuse logic of TermsEnum classes
    
    * Fixes IDE0038: Use pattern matching to avoid 'is' check followed by a cast
    
    * Fixes IDE0020: Use pattern matching to avoid 'is' check followed by a cast
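
    The three pattern-matching fixes above (IDE0019, IDE0038, and IDE0020)
    reduce to the same rewrite; a representative sketch with illustrative
    types:

        // Before: an 'is' type test followed by a separate cast.
        if (other is TermQuery)
        {
            TermQuery termQuery = (TermQuery)other;
            return Term.Equals(termQuery.Term);
        }

        // After: the pattern binds the cast result in one step.
        if (other is TermQuery termQuery)
        {
            return Term.Equals(termQuery.Term);
        }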
    
    * Fixes CA1802: Use Literals Where Appropriate
    
    * TestTargetFramework.props: Disabled IDE0044 warnings in test projects
    
    * TestTargetFramework.props: Disabled IDE0019, IDE0020, IDE0038 from tests
    
    * TestTargetFramework.props: Added CA1822 and IDE0059 and changed the list to be in lexicographical order by ID
    
    * TestTargetFramework.props: Disabled IDE1006 (Naming rule violation) warnings in tests
    
    * TestTargetFramework.props: Disabled CA1034 (Nested types should not be visible) warnings in tests
    
    * TestTargetFramework.props: Disabled CA1825 (Avoid zero-length array allocations) warnings in tests
    
    * TestTargetFramework.props: Disabled IDE0017 and IDE0028 warnings in tests (object and collection initialization can be simplified)
    
    * TestTargetFramework.props: Disabled IDE0051 (Remove unused private member) warnings in tests
    
    * Fixes IDE0044: Add readonly modifier
    
    * Directory.Build.props: Disable IDE0032 warnings (use auto property)
    
    * Fixes CA1813: Avoid unsealed attributes
    
    * PERFORMANCE: Fixes CA1822: Mark members as static
    
    * Added aggressive inlining to codecs, asserts and several other areas
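
    For reference, the hint added in those spots is the standard attribute
    below (the method body here is only a placeholder):

        using System.Runtime.CompilerServices;

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static int Lsb(int value) => value & 1;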
    
    * Removed InitializeInstanceFields() methods in all cases except in NumericTokenStream where we need to initialize from multiple constructors
    
    * Lucene.Net.Util: Added aggressive inlining on several methods
    
    * Fixes IDE0059: Remove unnecessary value assignment
    
    * Fixes IDE0034: Simplify 'default' expression
    
    * Fixes IDE0051: Remove unused private member. Also removed dead code/commented code.
    
    * Directory.Build.props: Disabled CA1031 warnings (do not catch general exception types)
    
    * lucene-cli: Disabled CA1034 (Nested types should not be visible) warnings
    
    * Fixes CA1819: Properties should not return arrays
    
    * Fixes CA1012: Abstract types should not have constructors
    
    * PERFORMANCE: Fixes CA1825: Avoid zero-length array allocations
    
    * Fixes CA1052: Static holder types should be Static or NotInheritable
    
    * Fixes IDE0028: Use collection initializers
    
    * Fixes IDE1006: Naming Styles
    
    * Fixes IDE0063: Use simple 'using' statement
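
    Typical IDE0063 rewrite (a sketch with illustrative names; the only
    behavioral difference is that the 'using' declaration disposes at the end
    of the enclosing scope rather than at a closing brace):

        // Before
        using (var output = directory.CreateOutput(fileName, context))
        {
            WriteHeader(output);
        }

        // After
        using var output = directory.CreateOutput(fileName, context);
        WriteHeader(output);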
    
    * Fixes IDE0016: Use throw expression
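
    Typical IDE0016 rewrite (constructor and field names illustrative):

        // Before
        public AnalyzerWrapperSketch(Analyzer analyzer)
        {
            if (analyzer == null) throw new ArgumentNullException(nameof(analyzer));
            this.analyzer = analyzer;
        }

        // After: null check and assignment collapse into a throw expression.
        public AnalyzerWrapperSketch(Analyzer analyzer)
            => this.analyzer = analyzer ?? throw new ArgumentNullException(nameof(analyzer));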
    
    * TestTargetFramework.props: Disabled IDE0031 (Use null propagation) warnings in tests
    
    * Fixes IDE0031: Use null propagation
    
    * Fixes IDE0029: Use coalesce expression
    
    * Fixes IDE0030: Use coalesce expression (nullable)
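
    The null-propagation and coalesce fixes above (IDE0031, IDE0029, IDE0030)
    are all of this shape (sketch, illustrative names):

        // IDE0031 - null propagation.
        //   Before: if (Completed != null) Completed(this, EventArgs.Empty);
        Completed?.Invoke(this, EventArgs.Empty);

        // IDE0029 - coalesce expression.
        //   Before: string dir = userDir != null ? userDir : defaultDir;
        string dir = userDir ?? defaultDir;

        // IDE0030 - coalesce expression on a nullable value type.
        //   Before: int boost = explicitBoost.HasValue ? explicitBoost.Value : 1;
        int boost = explicitBoost ?? 1;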
    
    * Fixes CA1820: Test for empty strings using string length
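
    Typical CA1820 rewrite (variable name illustrative):

        // Before: equality comparison against the empty string.
        if (field == string.Empty) return;

        // After: a length test is cheaper and is what CA1820 asks for.
        if (field.Length == 0) return;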
    
    * Fixes CA1810: Initialize reference type static fields inline
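
    Typical CA1810 rewrite (type and member names illustrative): moving the
    work into an inline field initializer lets the compiler drop the explicit
    static constructor, so the type keeps the cheaper beforefieldinit
    initialization semantics.

        // Before
        private static readonly string[] Formats;
        static FormatsHolder() { Formats = BuildFormats(); }

        // After
        private static readonly string[] Formats = BuildFormats();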
    
    * TestTargetFramework.props: Disabled CA2219 (Do not raise exceptions in exception clauses) warnings in tests
    
    * Directory.Build.props: Disabled IDE0070 (Use 'System.HashCode.Combine') warnings because this functionality requires .NET Standard 2.1
    
    * Fixes IDE0025: Use expression body for properties
    
    * Fixes IDE0021: Use block body for constructors
    
    * TestTargetFramework.props: Disabled IDE0040 (Add accessibility modifiers) warnings in tests
    
    * Fixes IDE0040: Add accessibility modifiers
    
    * Fixes IDE0041: Use is null check
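
    Typical IDE0041 rewrite (sketch):

        // Before: cast to object (or ReferenceEquals) to sidestep any overloaded ==.
        if ((object)other == null) return false;

        // After: 'is null' always performs the plain reference check.
        if (other is null) return false;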
    
    * Fixes CA1815: Override equals and operator equals on value types
    
    * TestTargetFramework.props: Disabled IDE0049 (Use language keywords instead of framework type names for type references) warnings in tests
    
    * Fixes IDE0049: Use language keywords instead of framework type names for type references
    
    * Fixes IDE0039: Use local function
    
    * Fixes IDE0071: Simplify interpolation
    
    * Fixes IDE0027: Use expression body for accessors
    
    * Fixes IDE1005: Use conditional delegate call
    
    * Added .editorconfig file to control formatting and code analysis rules
    
    * Directory.Build.props: Removed all Code Analysis settings that are now controlled by .editorconfig
---
 .editorconfig                                      |  175 +
 Directory.Build.props                              |    5 +-
 Lucene.Net.sln                                     |    1 +
 TestTargetFramework.props                          |   21 +
 .../Analysis/Ar/ArabicLetterTokenizerFactory.cs    |    2 +-
 .../Ar/ArabicNormalizationFilterFactory.cs         |    2 +-
 .../Analysis/Ar/ArabicStemFilterFactory.cs         |    2 +-
 .../Analysis/Bg/BulgarianStemFilterFactory.cs      |    2 +-
 .../Analysis/Br/BrazilianStemFilterFactory.cs      |    2 +-
 .../Analysis/CharFilter/HTMLStripCharFilter.cs     |   26 +-
 .../CharFilter/HTMLStripCharFilterFactory.cs       |    4 +-
 .../CharFilter/MappingCharFilterFactory.cs         |    4 +-
 .../Analysis/Cjk/CJKBigramFilterFactory.cs         |    2 +-
 .../Analysis/Cjk/CJKTokenizerFactory.cs            |    2 +-
 .../Analysis/Cjk/CJKWidthFilterFactory.cs          |    2 +-
 .../Ckb/SoraniNormalizationFilterFactory.cs        |    2 +-
 .../Analysis/Ckb/SoraniStemFilterFactory.cs        |    2 +-
 .../Analysis/Cn/ChineseFilterFactory.cs            |    2 +-
 .../Analysis/Cn/ChineseTokenizerFactory.cs         |    2 +-
 .../CommonGrams/CommonGramsFilterFactory.cs        |    2 +-
 .../DictionaryCompoundWordTokenFilterFactory.cs    |    2 +-
 .../Compound/Hyphenation/HyphenationTree.cs        |    6 +-
 .../Analysis/Compound/Hyphenation/PatternParser.cs |   18 +-
 .../HyphenationCompoundWordTokenFilterFactory.cs   |    2 +-
 .../Analysis/Core/KeywordTokenizerFactory.cs       |    2 +-
 .../Analysis/Core/LetterTokenizerFactory.cs        |    2 +-
 .../Analysis/Core/LowerCaseFilterFactory.cs        |    2 +-
 .../Analysis/Core/LowerCaseTokenizerFactory.cs     |    2 +-
 .../Analysis/Core/StopFilterFactory.cs             |    2 +-
 .../Analysis/Core/TypeTokenFilterFactory.cs        |    2 +-
 .../Analysis/Core/UpperCaseFilterFactory.cs        |    2 +-
 .../Analysis/Core/WhitespaceTokenizerFactory.cs    |    2 +-
 .../Analysis/Cz/CzechStemFilterFactory.cs          |    2 +-
 .../Analysis/De/GermanLightStemFilterFactory.cs    |    2 +-
 .../Analysis/De/GermanMinimalStemFilterFactory.cs  |    2 +-
 .../De/GermanNormalizationFilterFactory.cs         |    2 +-
 .../Analysis/De/GermanStemFilterFactory.cs         |    2 +-
 .../Analysis/El/GreekLowerCaseFilterFactory.cs     |    2 +-
 .../Analysis/El/GreekStemFilterFactory.cs          |    2 +-
 .../Analysis/En/EnglishMinimalStemFilterFactory.cs |    2 +-
 .../Analysis/En/EnglishPossessiveFilterFactory.cs  |    2 +-
 .../Analysis/En/KStemFilterFactory.cs              |    2 +-
 .../Analysis/En/PorterStemFilterFactory.cs         |    2 +-
 .../Analysis/Es/SpanishLightStemFilterFactory.cs   |    2 +-
 .../Analysis/Fa/PersianCharFilterFactory.cs        |    2 +-
 .../Fa/PersianNormalizationFilterFactory.cs        |    2 +-
 .../Analysis/Fi/FinnishLightStemFilterFactory.cs   |    2 +-
 .../Analysis/Fr/FrenchLightStemFilterFactory.cs    |    2 +-
 .../Analysis/Fr/FrenchMinimalStemFilterFactory.cs  |    2 +-
 .../Analysis/Ga/IrishLowerCaseFilterFactory.cs     |    2 +-
 .../Gl/GalicianMinimalStemFilterFactory.cs         |    2 +-
 .../Analysis/Gl/GalicianStemFilterFactory.cs       |    2 +-
 .../Analysis/Hi/HindiNormalizationFilterFactory.cs |    2 +-
 .../Analysis/Hi/HindiStemFilterFactory.cs          |    2 +-
 .../Analysis/Hu/HungarianLightStemFilterFactory.cs |    2 +-
 .../Analysis/Hunspell/Dictionary.cs                |    9 +-
 .../Analysis/Hunspell/HunspellStemFilterFactory.cs |    2 +-
 .../Analysis/Id/IndonesianStemFilterFactory.cs     |    2 +-
 .../Analysis/In/IndicNormalizationFilterFactory.cs |    2 +-
 .../Analysis/It/ItalianLightStemFilterFactory.cs   |    2 +-
 .../Analysis/Lv/LatvianStemFilterFactory.cs        |    2 +-
 .../Miscellaneous/ASCIIFoldingFilterFactory.cs     |    2 +-
 .../Miscellaneous/CapitalizationFilterFactory.cs   |    2 +-
 .../Miscellaneous/CodepointCountFilterFactory.cs   |    2 +-
 .../Miscellaneous/HyphenatedWordsFilterFactory.cs  |    2 +-
 .../Miscellaneous/KeepWordFilterFactory.cs         |    2 +-
 .../Miscellaneous/KeywordMarkerFilterFactory.cs    |    2 +-
 .../Miscellaneous/KeywordRepeatFilterFactory.cs    |    2 +-
 .../Analysis/Miscellaneous/LengthFilterFactory.cs  |    2 +-
 .../Miscellaneous/LimitTokenCountFilterFactory.cs  |    2 +-
 .../LimitTokenPositionFilterFactory.cs             |    2 +-
 .../Miscellaneous/Lucene47WordDelimiterFilter.cs   |    2 +
 .../Miscellaneous/PrefixAwareTokenFilter.cs        |    2 +-
 .../RemoveDuplicatesTokenFilterFactory.cs          |    2 +-
 .../ScandinavianFoldingFilterFactory.cs            |    2 +-
 .../ScandinavianNormalizationFilterFactory.cs      |    2 +-
 .../Miscellaneous/StemmerOverrideFilterFactory.cs  |    2 +-
 .../Analysis/Miscellaneous/TrimFilter.cs           |    4 +-
 .../Analysis/Miscellaneous/TrimFilterFactory.cs    |    2 +-
 .../Miscellaneous/WordDelimiterFilterFactory.cs    |    4 +-
 .../Miscellaneous/WordDelimiterIterator.cs         |   46 -
 .../Analysis/NGram/EdgeNGramFilterFactory.cs       |    2 +-
 .../Analysis/NGram/EdgeNGramTokenizerFactory.cs    |    2 +-
 .../Analysis/NGram/NGramFilterFactory.cs           |    2 +-
 .../Analysis/NGram/NGramTokenizerFactory.cs        |    2 +-
 .../Analysis/No/NorwegianLightStemFilterFactory.cs |    2 +-
 .../No/NorwegianMinimalStemFilterFactory.cs        |    2 +-
 .../Analysis/No/NorwegianMinimalStemmer.cs         |    4 +-
 .../Analysis/Path/PathHierarchyTokenizerFactory.cs |    2 +-
 .../Pattern/PatternReplaceCharFilterFactory.cs     |    2 +-
 .../Pattern/PatternReplaceFilterFactory.cs         |    2 +-
 .../Analysis/Pattern/PatternTokenizerFactory.cs    |    2 +-
 .../Payloads/DelimitedPayloadTokenFilterFactory.cs |    2 +-
 .../Payloads/NumericPayloadTokenFilterFactory.cs   |    2 +-
 .../Analysis/Payloads/PayloadHelper.cs             |    2 +-
 .../TokenOffsetPayloadTokenFilterFactory.cs        |    2 +-
 .../Payloads/TypeAsPayloadTokenFilterFactory.cs    |    2 +-
 .../Analysis/Position/PositionFilterFactory.cs     |    2 +-
 .../Pt/PortugueseLightStemFilterFactory.cs         |    2 +-
 .../Pt/PortugueseMinimalStemFilterFactory.cs       |    2 +-
 .../Analysis/Pt/PortugueseStemFilterFactory.cs     |    2 +-
 .../Analysis/Reverse/ReverseStringFilterFactory.cs |    2 +-
 .../Analysis/Ru/RussianLetterTokenizerFactory.cs   |    2 +-
 .../Analysis/Ru/RussianLightStemFilterFactory.cs   |    2 +-
 .../Analysis/Shingle/ShingleFilterFactory.cs       |    2 +-
 .../Analysis/Sinks/TeeSinkTokenFilter.cs           |    2 +-
 .../Snowball/SnowballPorterFilterFactory.cs        |    2 +-
 .../Analysis/Standard/ClassicFilterFactory.cs      |    2 +-
 .../Analysis/Standard/ClassicTokenizerFactory.cs   |    2 +-
 .../Analysis/Standard/ClassicTokenizerImpl.cs      |   51 +-
 .../Analysis/Standard/StandardFilterFactory.cs     |    2 +-
 .../Analysis/Standard/StandardTokenizerFactory.cs  |    2 +-
 .../Analysis/Standard/StandardTokenizerImpl.cs     |   51 +-
 .../Standard/Std31/StandardTokenizerImpl31.cs      |   32 +-
 .../Standard/Std31/UAX29URLEmailTokenizerImpl31.cs |   32 +-
 .../Standard/Std34/StandardTokenizerImpl34.cs      |   36 +-
 .../Standard/Std34/UAX29URLEmailTokenizerImpl34.cs |   35 +-
 .../Standard/Std36/UAX29URLEmailTokenizerImpl36.cs |   35 +-
 .../Standard/Std40/StandardTokenizerImpl40.cs      |   35 +-
 .../Standard/Std40/UAX29URLEmailTokenizerImpl40.cs |   35 +-
 .../Standard/UAX29URLEmailTokenizerFactory.cs      |    2 +-
 .../Standard/UAX29URLEmailTokenizerImpl.cs         |   45 +-
 .../Analysis/Sv/SwedishLightStemFilterFactory.cs   |    2 +-
 .../Analysis/Synonym/FSTSynonymFilterFactory.cs    |    6 +-
 .../Analysis/Synonym/SlowSynonymFilterFactory.cs   |   12 +-
 .../Analysis/Th/ThaiTokenizerFactory.cs            |    2 +-
 .../Analysis/Th/ThaiWordFilterFactory.cs           |    2 +-
 .../Analysis/Tr/TurkishLowerCaseFilterFactory.cs   |    2 +-
 .../Analysis/Util/AnalysisSPILoader.cs             |    3 +-
 .../Analysis/Util/BufferedCharFilter.cs            |   13 -
 .../Analysis/Util/CharArrayMap.cs                  |  124 +-
 .../Analysis/Util/CharArraySet.cs                  |   70 +-
 .../Analysis/Util/ElisionFilterFactory.cs          |    2 +-
 .../Analysis/Util/SegmentingTokenizerBase.cs       |    4 +-
 .../Analysis/Util/StemmerUtil.cs                   |    8 +-
 .../Analysis/Util/WordlistLoader.cs                |    8 +-
 .../Wikipedia/WikipediaTokenizerFactory.cs         |    2 +-
 .../Analysis/Wikipedia/WikipediaTokenizerImpl.cs   |   35 +-
 .../Collation/CollationKeyFilterFactory.cs         |    2 +-
 .../Tartarus/Snowball/Among.cs                     |    2 +-
 .../Analysis/Icu/ICUFoldingFilterFactory.cs        |    4 +-
 .../Analysis/Icu/ICUNormalizer2CharFilter.cs       |   12 +-
 .../Icu/ICUNormalizer2CharFilterFactory.cs         |    2 +-
 .../Analysis/Icu/ICUNormalizer2FilterFactory.cs    |    2 +-
 .../Analysis/Icu/ICUTransformFilterFactory.cs      |    4 +-
 .../Icu/Segmentation/DefaultICUTokenizerConfig.cs  |   20 +-
 .../Icu/Segmentation/ICUTokenizerFactory.cs        |    4 +-
 .../Icu/TokenAttributes/ScriptAttributeImpl.cs     |    4 +-
 .../Collation/ICUCollationKeyFilter.cs             |    4 +-
 .../Collation/ICUCollationKeyFilterFactory.cs      |    8 +-
 .../Dict/BinaryDictionary.cs                       |   22 +-
 .../Dict/CharacterDefinition.cs                    |   18 +-
 .../Dict/Dictionary.cs                             |    2 +-
 .../Dict/UserDictionary.cs                         |   13 +-
 .../GraphvizFormatter.cs                           |   11 +-
 .../JapaneseBaseFormFilterFactory.cs               |    2 +-
 .../JapaneseIterationMarkCharFilter.cs             |   24 +-
 .../JapaneseIterationMarkCharFilterFactory.cs      |    6 +-
 .../JapaneseKatakanaStemFilter.cs                  |    6 +-
 .../JapaneseKatakanaStemFilterFactory.cs           |    4 +-
 .../JapanesePartOfSpeechStopFilterFactory.cs       |    2 +-
 .../JapaneseReadingFormFilter.cs                   |    4 +-
 .../JapaneseReadingFormFilterFactory.cs            |    4 +-
 .../JapaneseTokenizer.cs                           |   19 +-
 .../JapaneseTokenizerFactory.cs                    |   10 +-
 .../TokenAttributes/BaseFormAttributeImpl.cs       |    2 +-
 .../TokenAttributes/InflectionAttributeImpl.cs     |    4 +-
 .../TokenAttributes/PartOfSpeechAttributeImpl.cs   |    2 +-
 .../TokenAttributes/ReadingAttributeImpl.cs        |    4 +-
 .../Tools/BinaryDictionaryWriter.cs                |  108 +-
 .../Tools/CharacterDefinitionWriter.cs             |   24 +-
 .../Tools/ConnectionCostsBuilder.cs                |   44 +-
 .../Tools/ConnectionCostsWriter.cs                 |   28 +-
 .../Tools/DictionaryBuilder.cs                     |   14 +-
 .../Tools/TokenInfoDictionaryBuilder.cs            |   60 +-
 .../Tools/UnknownDictionaryBuilder.cs              |   72 +-
 .../Util/ToStringUtil.cs                           |    9 +-
 .../Morfologik/MorfologikFilterFactory.cs          |   12 +-
 .../MorphosyntacticTagsAttribute.cs                |    5 +-
 .../Uk/UkrainianMorfologikAnalyzer.cs              |   10 +-
 .../OpenNLPChunkerFilterFactory.cs                 |    2 +-
 .../OpenNLPLemmatizerFilter.cs                     |   26 +-
 .../OpenNLPLemmatizerFilterFactory.cs              |    2 +-
 .../OpenNLPPOSFilter.cs                            |    2 +-
 .../OpenNLPPOSFilterFactory.cs                     |    2 +-
 .../OpenNLPSentenceBreakIterator.cs                |    5 +-
 .../OpenNLPTokenizer.cs                            |    4 +-
 .../OpenNLPTokenizerFactory.cs                     |    2 +-
 .../Tools/NLPLemmatizerOp.cs                       |    2 +-
 .../Tools/OpenNLPOpsFactory.cs                     |   26 +-
 .../BeiderMorseFilterFactory.cs                    |    4 +-
 .../DoubleMetaphoneFilter.cs                       |    2 +-
 .../DoubleMetaphoneFilterFactory.cs                |    4 +-
 .../Language/AbstractCaverphone .cs                |   23 +-
 .../Language/Bm/Lang.cs                            |    5 +-
 .../Language/Bm/Languages.cs                       |    3 +-
 .../Language/Bm/PhoneticEngine.cs                  |   30 +-
 .../Language/Bm/ResourceConstants.cs               |    6 +-
 .../Language/Bm/Rule.cs                            |   17 +-
 .../Language/DaitchMokotoffSoundex.cs              |   50 +-
 .../Language/DoubleMetaphone.cs                    |   44 +-
 .../Language/MatchRatingApproachEncoder.cs         |   37 +-
 .../Language/Metaphone.cs                          |   30 +-
 .../Language/Nysiis.cs                             |   27 +-
 .../Language/RefinedSoundex.cs                     |   23 +-
 .../Language/Soundex.cs                            |   22 +-
 src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs |    4 +-
 .../PhoneticFilterFactory.cs                       |   12 +-
 src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs |   63 +-
 .../HMMChineseTokenizer.cs                         |   25 +
 .../HMMChineseTokenizerFactory.cs                  |    2 +-
 .../Hhmm/AbstractDictionary.cs                     |    6 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs |   17 +-
 .../Hhmm/BigramDictionary.cs                       |  202 +-
 .../Hhmm/HHMMSegmenter.cs                          |    4 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs   |   14 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs   |    2 +
 .../Hhmm/WordDictionary.cs                         |  222 +-
 .../SentenceTokenizer.cs                           |    2 +-
 .../SmartChineseAnalyzer.cs                        |    2 +-
 .../SmartChineseSentenceTokenizerFactory.cs        |    2 +-
 .../SmartChineseWordTokenFilterFactory.cs          |    2 +-
 src/Lucene.Net.Analysis.SmartCn/Utility.cs         |    2 +-
 src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs   |    4 +-
 src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs |   27 +-
 .../Egothor.Stemmer/Compile.cs                     |   23 +-
 .../Egothor.Stemmer/Diff.cs                        |   18 +-
 .../Egothor.Stemmer/DiffIt.cs                      |   46 +-
 .../Egothor.Stemmer/Gener.cs                       |    3 +-
 .../Egothor.Stemmer/Lift.cs                        |    4 +-
 .../Egothor.Stemmer/MultiTrie.cs                   |    2 +-
 .../Egothor.Stemmer/Trie.cs                        |    8 +-
 .../Stempel/StempelPolishStemFilterFactory.cs      |    2 +-
 .../Stempel/StempelStemmer.cs                      |    4 +-
 src/Lucene.Net.Benchmark/ByTask/Benchmark.cs       |    4 +-
 .../ByTask/Feeds/DemoHTMLParser.cs                 |    3 +-
 .../ByTask/Feeds/DirContentSource.cs               |   12 +-
 src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs  |   12 +-
 .../ByTask/Feeds/EnwikiContentSource.cs            |    3 +-
 .../ByTask/Feeds/EnwikiQueryMaker.cs               |   12 +-
 .../ByTask/Feeds/LineDocSource.cs                  |    2 +-
 .../ByTask/Feeds/LongToEnglishQueryMaker.cs        |    2 +-
 .../ByTask/Feeds/ReutersContentSource.cs           |   49 +-
 .../ByTask/Feeds/ReutersQueryMaker.cs              |   12 +-
 .../ByTask/Feeds/SimpleQueryMaker.cs               |    8 +-
 .../ByTask/Feeds/SingleDocSource.cs                |    2 +-
 .../ByTask/Feeds/SortableSingleDocSource.cs        |   11 +-
 .../ByTask/Feeds/SpatialDocMaker.cs                |   11 +-
 .../ByTask/Feeds/SpatialFileQueryMaker.cs          |    4 +-
 .../ByTask/Feeds/TrecContentSource.cs              |   29 +-
 .../ByTask/Feeds/TrecDocParser.cs                  |    5 +-
 .../ByTask/Feeds/TrecFBISParser.cs                 |   12 +-
 .../ByTask/Feeds/TrecFR94Parser.cs                 |    8 +-
 .../ByTask/Feeds/TrecFTParser.cs                   |    8 +-
 .../ByTask/Feeds/TrecGov2Parser.cs                 |    6 +-
 .../ByTask/Feeds/TrecLATimesParser.cs              |   14 +-
 src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs     |   32 +-
 .../ByTask/Programmatic/Sample.cs                  |   36 +-
 src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs    |    4 +-
 src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs    |    8 +-
 src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs |   10 +-
 .../ByTask/Tasks/AddIndexesTask.cs                 |   27 +-
 .../ByTask/Tasks/AnalyzerFactoryTask.cs            |   12 +-
 .../ByTask/Tasks/CloseIndexTask.cs                 |    2 +-
 .../ByTask/Tasks/CommitIndexTask.cs                |    8 +-
 .../ByTask/Tasks/ConsumeContentSourceTask.cs       |   23 +
 .../ByTask/Tasks/CreateIndexTask.cs                |    3 +-
 .../ByTask/Tasks/ForceMergeTask.cs                 |    2 +-
 .../ByTask/Tasks/NewAnalyzerTask.cs                |    3 +-
 .../ByTask/Tasks/NewLocaleTask.cs                  |    4 +-
 .../ByTask/Tasks/OpenReaderTask.cs                 |    2 +-
 src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs  |    8 +-
 .../ByTask/Tasks/PrintReaderTask.cs                |    2 +-
 src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs  |    6 +-
 .../ByTask/Tasks/ReadTokensTask.cs                 |   50 +-
 .../ByTask/Tasks/RepSumByNameRoundTask.cs          |    3 +-
 .../ByTask/Tasks/RepSumByNameTask.cs               |    3 +-
 .../ByTask/Tasks/RepSumByPrefRoundTask.cs          |    3 +-
 .../ByTask/Tasks/RepSumByPrefTask.cs               |    3 +-
 .../ByTask/Tasks/ReportTask.cs                     |    2 +-
 .../ByTask/Tasks/SearchWithCollectorTask.cs        |    2 +-
 .../ByTask/Tasks/TaskSequence.cs                   |   41 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs |   22 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs    |   66 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/FileUtils.cs |    2 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Format.cs    |    6 +-
 .../ByTask/Utils/StreamUtils.cs                    |    4 +-
 src/Lucene.Net.Benchmark/Constants.cs              |   10 +-
 src/Lucene.Net.Benchmark/Quality/QualityQuery.cs   |    7 +-
 src/Lucene.Net.Benchmark/Quality/QualityStats.cs   |   14 +-
 .../Quality/Trec/QueryDriver.cs                    |   58 +-
 .../Quality/Trec/Trec1MQReader.cs                  |    8 +-
 src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs |   10 +-
 .../Quality/Trec/TrecTopicsReader.cs               |    4 +-
 .../Quality/Utils/QualityQueriesFinder.cs          |   10 +-
 .../Quality/Utils/SubmissionReport.cs              |   10 +-
 .../Support/Sax/Ext/Attributes2Impl.cs             |    8 +-
 .../Support/Sax/Ext/Locator2Impl.cs                |    5 +-
 .../Support/Sax/Helpers/AttributesImpl.cs          |    4 +-
 .../Support/Sax/Helpers/NamespaceSupport.cs        |    2 +-
 .../Support/Sax/Helpers/XMLFilterImpl.cs           |    4 +-
 .../Support/Sax/SAXException.cs                    |    2 +-
 .../Support/TagSoup/HTMLScanner.cs                 |   40 +-
 .../Support/TagSoup/PYXScanner.cs                  |    2 +-
 .../Support/TagSoup/PYXWriter.cs                   |    2 +-
 src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs |   32 +-
 src/Lucene.Net.Benchmark/Support/TagSoup/Schema.cs |    2 +
 .../Support/TagSoup/XMLWriter.cs                   |    2 +-
 .../Support/Util/EnglishNumberFormatExtensions.cs  |    2 +-
 src/Lucene.Net.Benchmark/Utils/ExtractReuters.cs   |   90 +-
 src/Lucene.Net.Benchmark/Utils/ExtractWikipedia.cs |   20 +-
 .../KNearestNeighborClassifier.cs                  |    4 +-
 .../SimpleNaiveBayesClassifier.cs                  |    6 +-
 .../Appending/AppendingPostingsFormat.cs           |   24 +-
 .../BlockTerms/BlockTermsReader.cs                 |    5 +-
 .../BlockTerms/BlockTermsWriter.cs                 |   14 +-
 .../BlockTerms/FixedGapTermsIndexReader.cs         |   11 +-
 .../BlockTerms/FixedGapTermsIndexWriter.cs         |   14 +-
 .../BlockTerms/VariableGapTermsIndexReader.cs      |    4 +-
 .../BlockTerms/VariableGapTermsIndexWriter.cs      |   12 +-
 .../Bloom/BloomFilteringPostingsFormat.cs          |   19 +-
 src/Lucene.Net.Codecs/Bloom/FuzzySet.cs            |    2 +-
 .../DiskDV/DiskDocValuesFormat.cs                  |    7 +-
 src/Lucene.Net.Codecs/DiskDV/DiskNormsFormat.cs    |    8 +-
 .../IntBlock/FixedIntBlockIndexInput.cs            |    2 +-
 .../Memory/DirectDocValuesConsumer.cs              |    2 +
 .../Memory/DirectDocValuesFormat.cs                |    8 +-
 .../Memory/DirectDocValuesProducer.cs              |   35 +-
 .../Memory/DirectPostingsFormat.cs                 |  120 +-
 src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs  |   81 +-
 src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs  |    8 +-
 src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs     |   67 +-
 src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs     |    8 +-
 .../Memory/MemoryDocValuesConsumer.cs              |   11 +-
 .../Memory/MemoryDocValuesProducer.cs              |   41 +-
 .../Memory/MemoryPostingsFormat.cs                 |   49 +-
 .../Pulsing/PulsingPostingsFormat.cs               |    6 +-
 .../Pulsing/PulsingPostingsReader.cs               |   11 +-
 .../Pulsing/PulsingPostingsWriter.cs               |   66 +-
 src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs     |   42 +-
 src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs     |   40 +-
 src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs     |   10 +-
 src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs     |   16 +-
 .../SimpleText/SimpleTextDocValuesReader.cs        |    6 -
 .../SimpleText/SimpleTextDocValuesWriter.cs        |    2 +
 .../SimpleText/SimpleTextFieldsReader.cs           |   29 +-
 .../SimpleText/SimpleTextFieldsWriter.cs           |    2 -
 .../SimpleText/SimpleTextStoredFieldsReader.cs     |    2 +
 .../SimpleText/SimpleTextStoredFieldsWriter.cs     |    2 +
 .../SimpleText/SimpleTextTermVectorsReader.cs      |   13 +-
 .../SimpleText/SimpleTextTermVectorsWriter.cs      |    2 +
 src/Lucene.Net.Codecs/SimpleText/SimpleTextUtil.cs |    4 +-
 .../Facet/AssociationsFacetsExample.cs             |  113 +-
 src/Lucene.Net.Demo/Facet/DistanceFacetsExample.cs |  102 +-
 .../Facet/ExpressionAggregationFacetsExample.cs    |   86 +-
 .../Facet/MultiCategoryListsFacetsExample.cs       |   92 +-
 src/Lucene.Net.Demo/Facet/RangeFacetsExample.cs    |   66 +-
 src/Lucene.Net.Demo/Facet/SimpleFacetsExample.cs   |  181 +-
 .../Facet/SimpleSortedSetFacetsExample.cs          |  121 +-
 src/Lucene.Net.Demo/IndexFiles.cs                  |   82 +-
 src/Lucene.Net.Demo/Lucene.Net.Demo.csproj         |    2 +
 src/Lucene.Net.Demo/SearchFiles.cs                 |   96 +-
 src/Lucene.Net.Expressions/Bindings.cs             |    3 +-
 src/Lucene.Net.Expressions/Expression.cs           |    1 +
 src/Lucene.Net.Expressions/ExpressionComparator.cs |    2 +-
 .../ExpressionFunctionValues.cs                    |   12 +-
 .../ExpressionValueSource.cs                       |   25 +-
 .../JS/JavascriptCompiler.cs                       |   14 +-
 src/Lucene.Net.Expressions/JS/JavascriptLexer.cs   |   26 +-
 src/Lucene.Net.Expressions/JS/JavascriptParser.cs  |   26 +-
 src/Lucene.Net.Expressions/SimpleBindings.cs       |   12 +-
 src/Lucene.Net.Facet/DrillDownQuery.cs             |   12 +-
 src/Lucene.Net.Facet/DrillSidewaysQuery.cs         |    4 +-
 src/Lucene.Net.Facet/Facets.cs                     |    2 +-
 src/Lucene.Net.Facet/FacetsCollector.cs            |    2 +-
 src/Lucene.Net.Facet/FacetsConfig.cs               |   11 +-
 src/Lucene.Net.Facet/Range/LongRangeCounter.cs     |   18 +-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs     |    4 +-
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs      |    9 +-
 .../Taxonomy/Directory/DirectoryTaxonomyWriter.cs  |   14 +-
 src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs    |    4 +-
 .../Taxonomy/ParallelTaxonomyArrays.cs             |    2 +-
 .../Taxonomy/PrintTaxonomyStats.cs                 |   10 +-
 .../Taxonomy/TaxonomyFacetSumValueSource.cs        |    2 +-
 .../Taxonomy/WriterCache/CharBlockArray.cs         |    4 +-
 .../Taxonomy/WriterCache/CompactLabelToOrdinal.cs  |   22 +-
 .../Taxonomy/WriterCache/LabelToOrdinal.cs         |    2 +-
 .../Taxonomy/WriterCache/NameIntCacheLRU.cs        |   12 +-
 .../AbstractDistinctValuesCollector.cs             |    7 +-
 .../AbstractFirstPassGroupingCollector.cs          |    7 +-
 .../AbstractSecondPassGroupingCollector.cs         |   13 +-
 src/Lucene.Net.Grouping/BlockGroupingCollector.cs  |    4 +-
 .../Function/FunctionAllGroupHeadsCollector.cs     |    3 +-
 .../Function/FunctionDistinctValuesCollector.cs    |    3 +-
 .../FunctionSecondPassGroupingCollector.cs         |    3 +-
 src/Lucene.Net.Grouping/GroupDocs.cs               |    1 +
 src/Lucene.Net.Grouping/GroupingSearch.cs          |    6 +-
 src/Lucene.Net.Grouping/SearchGroup.cs             |    7 +-
 .../Term/TermAllGroupHeadsCollector.cs             |   16 +-
 .../Term/TermAllGroupsCollector.cs                 |    4 +-
 .../Term/TermFirstPassGroupingCollector.cs         |    2 +-
 src/Lucene.Net.Grouping/TopGroups.cs               |   16 +-
 .../Highlight/GradientFormatter.cs                 |    5 +-
 .../Highlight/Highlighter.cs                       |    8 +-
 .../Highlight/QueryScorer.cs                       |    6 +-
 .../Highlight/QueryTermExtractor.cs                |   12 +-
 .../Highlight/SimpleFragmenter.cs                  |    2 +-
 .../Highlight/SimpleHTMLFormatter.cs               |    4 +-
 .../Highlight/SimpleSpanFragmenter.cs              |    6 +-
 .../Highlight/SpanGradientFormatter.cs             |    2 +-
 src/Lucene.Net.Highlighter/Highlight/TokenGroup.cs |    6 +-
 .../Highlight/TokenSources.cs                      |    2 +-
 .../Highlight/TokenStreamFromTermPositionVector.cs |    8 +-
 .../Highlight/WeightedSpanTermExtractor.cs         |   76 +-
 .../PostingsHighlight/MultiTermHighlighting.cs     |    8 +-
 .../PostingsHighlight/PostingsHighlighter.cs       |    5 +-
 .../VectorHighlight/BaseFragListBuilder.cs         |    6 +-
 .../VectorHighlight/FieldFragList.cs               |   14 +-
 .../VectorHighlight/FieldPhraseList.cs             |  162 +-
 .../VectorHighlight/FieldQuery.cs                  |   78 +-
 .../VectorHighlight/SingleFragListBuilder.cs       |   22 +-
 src/Lucene.Net.Join/TermsIncludingScoreQuery.cs    |   32 +-
 src/Lucene.Net.Join/ToChildBlockJoinQuery.cs       |    4 +-
 src/Lucene.Net.Join/ToParentBlockJoinCollector.cs  |   16 +-
 .../ToParentBlockJoinFieldComparator.cs            |    8 +-
 src/Lucene.Net.Join/ToParentBlockJoinQuery.cs      |    4 +-
 .../MemoryIndex.MemoryIndexReader.cs               |   17 +-
 src/Lucene.Net.Memory/MemoryIndex.cs               |   50 +-
 src/Lucene.Net.Misc/Document/LazyDocument.cs       |   10 +-
 src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs |   30 +-
 src/Lucene.Net.Misc/Index/IndexSplitter.cs         |    8 +-
 .../Index/MultiPassIndexSplitter.cs                |   34 +-
 .../Index/Sorter/BlockJoinComparatorSource.cs      |   12 +-
 src/Lucene.Net.Misc/Index/Sorter/Sorter.cs         |   17 +-
 .../Index/Sorter/SortingAtomicReader.cs            |   16 +-
 .../Index/Sorter/SortingMergePolicy.cs             |   12 +-
 src/Lucene.Net.Misc/Misc/GetTermInfo.cs            |    8 +-
 src/Lucene.Net.Misc/Misc/HighFreqTerms.cs          |   15 +-
 src/Lucene.Net.Misc/Misc/IndexMergeTool.cs         |   40 +-
 src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs      |   10 +-
 .../Util/Fst/UpToTwoPositiveIntOutputs.cs          |   28 +-
 src/Lucene.Net.Queries/BooleanFilter.cs            |    2 +-
 src/Lucene.Net.Queries/BoostingQuery.cs            |    9 +-
 src/Lucene.Net.Queries/ChainedFilter.cs            |   12 +-
 src/Lucene.Net.Queries/CustomScoreQuery.cs         |    7 +-
 src/Lucene.Net.Queries/Function/BoostedQuery.cs    |    4 +-
 .../Function/DocValues/BoolDocValues.cs            |    2 +-
 .../Function/DocValues/DocTermsIndexDocValues.cs   |    2 +-
 .../Function/DocValues/DoubleDocValues.cs          |    2 +-
 .../Function/DocValues/FloatDocValues.cs           |    2 +-
 .../Function/DocValues/IntDocValues.cs             |    2 +-
 .../Function/DocValues/LongDocValues.cs            |    2 +-
 .../Function/DocValues/StrDocValues.cs             |    2 +-
 src/Lucene.Net.Queries/Function/FunctionQuery.cs   |    7 +-
 .../Function/ValueSources/ByteFieldSource.cs       |    5 +-
 .../Function/ValueSources/DefFunction.cs           |    3 -
 .../Function/ValueSources/DualFloatFunction.cs     |    2 +-
 .../Function/ValueSources/EnumFieldSource.cs       |    4 +-
 .../Function/ValueSources/FieldCacheSource.cs      |    9 +-
 .../Function/ValueSources/IDFValueSource.cs        |    8 +-
 .../Function/ValueSources/IntFieldSource.cs        |    4 +-
 .../Function/ValueSources/MultiBoolFunction.cs     |    2 +-
 .../Function/ValueSources/MultiFloatFunction.cs    |    2 +-
 .../Function/ValueSources/NormValueSource.cs       |    7 +-
 .../Function/ValueSources/OrdFieldSource.cs        |   15 +-
 .../Function/ValueSources/ReverseOrdFieldSource.cs |    7 +-
 .../Function/ValueSources/SimpleBoolFunction.cs    |    2 +-
 .../Function/ValueSources/SimpleFloatFunction.cs   |    2 +-
 .../Function/ValueSources/SingleFunction.cs        |    2 +-
 .../ValueSources/SumTotalTermFreqValueSource.cs    |    9 +-
 .../Function/ValueSources/TFValueSource.cs         |    9 +-
 .../Function/ValueSources/TermFreqValueSource.cs   |   11 +-
 .../ValueSources/TotalTermFreqValueSource.cs       |    7 +-
 src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs         |    3 +-
 src/Lucene.Net.Queries/TermsFilter.cs              |    6 +-
 .../Classic/MultiFieldQueryParser.cs               |   14 +-
 src/Lucene.Net.QueryParser/Classic/QueryParser.cs  |   16 +-
 .../Classic/QueryParserBase.cs                     |   31 +-
 .../Classic/QueryParserConstants.cs                |   74 +-
 .../Classic/QueryParserTokenManager.cs             |   16 +-
 .../Classic/TokenMgrError.cs                       |    8 +-
 .../ComplexPhrase/ComplexPhraseQueryParser.cs      |   15 +-
 .../Flexible/Core/Builders/QueryTreeBuilder.cs     |    9 +-
 .../Flexible/Core/Config/AbstractQueryConfig.cs    |    5 +-
 .../Flexible/Core/Config/FieldConfig.cs            |    9 +-
 .../Flexible/Core/Nodes/AnyQueryNode.cs            |   14 +-
 .../Flexible/Core/Nodes/GroupQueryNode.cs          |    6 +-
 .../Flexible/Core/Nodes/ModifierQueryNode.cs       |    6 +-
 .../Flexible/Core/Nodes/PhraseSlopQueryNode.cs     |    8 +-
 .../Flexible/Core/Nodes/ProximityQueryNode.cs      |   12 +-
 .../Flexible/Core/Nodes/SlopQueryNode.cs           |    8 +-
 .../Core/Nodes/TokenizedPhraseQueryNode.cs         |    4 +-
 .../Core/Processors/QueryNodeProcessorImpl.cs      |    6 +-
 .../Processors/RemoveDeletedQueryNodesProcessor.cs |    2 +-
 .../Flexible/Core/QueryNodeError.cs                |    2 +-
 .../Flexible/Core/QueryParserHelper.cs             |   14 +-
 .../Flexible/Core/Util/QueryNodeOperation.cs       |   11 +-
 .../Flexible/Core/Util/UnescapedCharSequence.cs    |    8 +-
 .../Flexible/Messages/MessageImpl.cs               |    7 +-
 .../Flexible/Messages/NLS.cs                       |   22 +-
 .../BooleanModifiersQueryNodeProcessor.cs          |    6 +-
 .../Standard/Builders/BooleanQueryNodeBuilder.cs   |    3 +-
 .../Builders/MultiPhraseQueryNodeBuilder.cs        |    3 +-
 .../Standard/Builders/SlopQueryNodeBuilder.cs      |    4 +-
 .../Builders/StandardBooleanQueryNodeBuilder.cs    |    3 +-
 .../Standard/Config/FieldBoostMapFCListener.cs     |    5 +-
 .../Config/FieldDateResolutionFCListener.cs        |    2 +-
 .../Standard/Config/NumericFieldConfigListener.cs  |   10 +-
 .../Standard/Nodes/AbstractRangeQueryNode.cs       |    8 +-
 .../Standard/Nodes/MultiPhraseQueryNode.cs         |    4 +-
 .../Standard/Nodes/StandardBooleanQueryNode.cs     |    2 +-
 .../Standard/Parser/EscapeQuerySyntaxImpl.cs       |    4 +-
 .../Flexible/Standard/Parser/FastCharStream.cs     |   17 +-
 .../Flexible/Standard/Parser/ParseException.cs     |    2 +-
 .../Standard/Parser/StandardSyntaxParser.cs        |   31 +-
 .../Parser/StandardSyntaxParserTokenManager.cs     |   15 +-
 .../Flexible/Standard/Parser/TokenMgrError.cs      |    8 +-
 .../Processors/AllowLeadingWildcardProcessor.cs    |    4 +-
 .../Processors/AnalyzerQueryNodeProcessor.cs       |   20 +-
 .../BooleanQuery2ModifierNodeProcessor.cs          |   15 +-
 ...eanSingleChildOptimizationQueryNodeProcessor.cs |    4 +-
 .../Standard/Processors/BoostQueryNodeProcessor.cs |    3 +-
 .../Standard/Processors/FuzzyQueryNodeProcessor.cs |    5 +-
 .../Standard/Processors/GroupQueryNodeProcessor.cs |   16 +-
 .../Processors/MatchAllDocsQueryNodeProcessor.cs   |    4 +-
 .../Processors/MultiFieldQueryNodeProcessor.cs     |   10 +-
 .../Processors/NumericQueryNodeProcessor.cs        |    5 +-
 .../Processors/NumericRangeQueryNodeProcessor.cs   |    3 +-
 .../Processors/OpenRangeQueryNodeProcessor.cs      |    9 +-
 .../Processors/PhraseSlopQueryNodeProcessor.cs     |    4 +-
 .../RemoveEmptyNonLeafQueryNodeProcessor.cs        |    2 +-
 .../Processors/TermRangeQueryNodeProcessor.cs      |   13 +-
 .../Processors/WildcardQueryNodeProcessor.cs       |    2 +-
 .../Flexible/Standard/QueryParserUtil.cs           |    6 +-
 .../Flexible/Standard/StandardQueryParser.cs       |   35 +-
 .../Lucene.Net.QueryParser.csproj                  |    2 +
 .../Simple/SimpleQueryParser.cs                    |   17 +-
 .../Surround/Parser/QueryParser.cs                 |   12 +-
 .../Surround/Parser/QueryParserTokenManager.cs     |    9 +-
 .../Surround/Parser/TokenMgrError.cs               |    8 +-
 .../Surround/Query/BasicQueryFactory.cs            |    2 +-
 .../Surround/Query/ComposedQuery.cs                |    2 +-
 .../Surround/Query/DistanceQuery.cs                |    4 +-
 .../Surround/Query/FieldsQuery.cs                  |   10 +-
 .../Surround/Query/OrQuery.cs                      |    4 +-
 .../Surround/Query/SimpleTerm.cs                   |    4 +-
 .../Surround/Query/SpanNearClauseFactory.cs        |    8 +-
 .../Surround/Query/SrndQuery.cs                    |   11 +-
 .../Xml/Builders/BoostingQueryBuilder.cs           |    2 +-
 .../Xml/Builders/CachedFilterBuilder.cs            |    3 +-
 .../Xml/Builders/DisjunctionMaxQueryBuilder.cs     |    3 +-
 .../Xml/Builders/FuzzyLikeThisQueryBuilder.cs      |    8 +-
 .../Xml/Builders/LikeThisQueryBuilder.cs           |    6 +-
 .../Xml/Builders/SpanQueryBuilderFactory.cs        |    3 +-
 .../Xml/Builders/UserInputQueryBuilder.cs          |    6 +-
 src/Lucene.Net.QueryParser/Xml/DOMUtils.cs         |    5 +-
 .../Xml/FilterBuilderFactory.cs                    |    8 +-
 .../Xml/QueryBuilderFactory.cs                     |    8 +-
 .../Xml/QueryTemplateManager.cs                    |   41 +-
 src/Lucene.Net.Replicator/Http/HttpReplicator.cs   |    6 +-
 .../Http/ReplicationService.cs                     |    6 +-
 .../IndexAndTaxonomyRevision.cs                    |   11 +-
 .../IndexReplicationHandler.cs                     |    6 +-
 src/Lucene.Net.Replicator/IndexRevision.cs         |    2 +-
 src/Lucene.Net.Replicator/LocalReplicator.cs       |    8 +-
 .../PerSessionDirectoryFactory.cs                  |    2 +-
 src/Lucene.Net.Replicator/ReplicationClient.cs     |    1 +
 src/Lucene.Net.Replicator/RevisionFile.cs          |    6 +-
 .../Queries/FuzzyLikeThisQuery.cs                  |    3 +-
 src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs   |   12 +-
 .../Queries/SortedSetSortField.cs                  |    2 -
 .../Prefix/AbstractPrefixTreeFilter.cs             |    8 +-
 .../Prefix/AbstractVisitingPrefixTreeFilter.cs     |   17 +-
 .../Prefix/PrefixTreeStrategy.cs                   |   26 +-
 src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs         |   10 +-
 .../Prefix/Tree/GeohashPrefixTree.cs               |    2 +-
 .../Prefix/Tree/QuadPrefixTree.cs                  |   15 +-
 .../Prefix/Tree/SpatialPrefixTree.cs               |    6 +-
 .../Prefix/Tree/SpatialPrefixTreeFactory.cs        |    9 +-
 .../Prefix/WithinPrefixTreeFilter.cs               |    7 +-
 src/Lucene.Net.Spatial/Query/SpatialArgs.cs        |    2 +-
 src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs  |    5 +-
 src/Lucene.Net.Spatial/Query/SpatialOperation.cs   |    5 +-
 .../Serialized/SerializedDVStrategy.cs             |   14 +-
 src/Lucene.Net.Spatial/SpatialStrategy.cs          |    6 +-
 .../Util/CachingDoubleValueSource.cs               |    6 +-
 .../Util/ShapeFieldCacheDistanceValueSource.cs     |    5 +-
 .../Util/ShapeFieldCacheProvider.cs                |    2 +-
 src/Lucene.Net.Spatial/Util/ValueSourceFilter.cs   |    7 +-
 .../Vector/DistanceValueSource.cs                  |    9 +-
 .../Vector/PointVectorStrategy.cs                  |   30 +-
 src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs |   10 +-
 .../Spell/LuceneLevenshteinDistance.cs             |    2 +-
 src/Lucene.Net.Suggest/Spell/NGramDistance.cs      |    3 +-
 src/Lucene.Net.Suggest/Spell/SpellChecker.cs       |   52 +-
 .../Spell/WordBreakSpellChecker.cs                 |   50 +-
 .../Suggest/Analyzing/AnalyzingSuggester.cs        |    4 +-
 .../Suggest/Analyzing/BlendedInfixSuggester.cs     |   10 +-
 .../Suggest/Analyzing/FreeTextSuggester.cs         |    4 +-
 src/Lucene.Net.Suggest/Suggest/FileDictionary.cs   |    2 +-
 .../Suggest/Fst/FSTCompletion.cs                   |    3 +-
 .../Suggest/Fst/FSTCompletionBuilder.cs            |    9 +-
 .../Suggest/Jaspell/JaspellTernarySearchTrie.cs    |  100 +-
 src/Lucene.Net.Suggest/Suggest/Lookup.cs           |    2 +-
 .../Support/TestFramework/Assert.cs                |   74 +-
 .../Analysis/BaseTokenStreamTestCase.cs            |   19 +-
 .../Analysis/CollationTestBase.cs                  |  244 +-
 .../Analysis/LookaheadTokenFilter.cs               |    2 +-
 .../Analysis/MockAnalyzer.cs                       |    5 +-
 .../Analysis/MockGraphTokenFilter.cs               |    4 +-
 .../Analysis/MockTokenizer.cs                      |    2 +-
 .../Analysis/ValidatingTokenFilter.cs              |   10 +-
 .../Analysis/VocabularyAssert.cs                   |   50 +-
 .../Codecs/Asserting/AssertingDocValuesFormat.cs   |   54 +-
 .../Codecs/Asserting/AssertingPostingsFormat.cs    |    2 +-
 .../Bloom/TestBloomFilteredLucene41Postings.cs     |    2 +-
 .../Codecs/Compressing/CompressingCodec.cs         |    4 +-
 .../Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs   |    8 +-
 .../Codecs/Lucene3x/PreFlexRWFieldsWriter.cs       |   11 +-
 .../Codecs/Lucene3x/PreFlexRWNormsConsumer.cs      |   10 +-
 .../Codecs/Lucene3x/PreFlexRWPostingsFormat.cs     |    7 +-
 .../Codecs/Lucene3x/PreFlexRWSkipListWriter.cs     |   12 +-
 .../Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs |    7 +-
 .../Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs  |    7 +-
 .../Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs  |   17 +-
 .../Codecs/Lucene3x/TermInfosWriter.cs             |   10 +-
 .../Codecs/Lucene40/Lucene40DocValuesWriter.cs     |    2 +-
 .../Codecs/Lucene42/Lucene42DocValuesConsumer.cs   |   13 +-
 .../Codecs/MissingOrdRemapper.cs                   |   17 +-
 .../MockFixedIntBlockPostingsFormat.cs             |   19 +-
 .../Codecs/MockRandom/MockRandomPostingsFormat.cs  |   17 +-
 .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs        |   10 +-
 .../Index/AssertingAtomicReader.cs                 |    8 +-
 .../BaseCompressingDocValuesFormatTestCase.cs      |  139 +-
 .../Index/BaseDocValuesFormatTestCase.cs           | 4583 +++++++++-----------
 .../Index/BaseIndexFileFormatTestCase.cs           |   64 +-
 .../Index/BaseMergePolicyTestCase.cs               |   51 +-
 .../Index/BasePostingsFormatTestCase.cs            |  112 +-
 .../Index/BaseStoredFieldsFormatTestCase.cs        | 1072 +++--
 .../Index/BaseTermVectorsFormatTestCase.cs         |  270 +-
 src/Lucene.Net.TestFramework/Index/DocHelper.cs    |   21 +-
 .../Index/FieldFilterAtomicReader.cs               |    4 +-
 .../Index/MockRandomMergePolicy.cs                 |    2 +-
 src/Lucene.Net.TestFramework/Index/RandomCodec.cs  |    6 +-
 .../Index/RandomIndexWriter.cs                     |   20 +-
 .../Index/ThreadedIndexingAndSearchingTestCase.cs  |   25 +-
 .../Lucene.Net.TestFramework.csproj                |    3 +
 .../Search/AssertingBulkScorer.cs                  |    4 +-
 .../Search/AssertingIndexSearcher.cs               |    7 +-
 .../Search/AssertingQuery.cs                       |    7 +-
 src/Lucene.Net.TestFramework/Search/CheckHits.cs   |    6 +-
 .../Search/ShardSearchingTestBase.cs               |   12 +-
 .../Store/BaseDirectoryTestCase.cs                 |  524 +--
 .../Store/MockDirectoryWrapper.cs                  |  185 +-
 .../Store/MockIndexInputWrapper.cs                 |    4 +-
 .../Store/MockIndexOutputWrapper.cs                |    2 +-
 .../Support/ApiScanTestBase.cs                     |    8 +-
 .../Support/Attributes/DeadlockAttribute.cs        |    2 +-
 .../Attributes/LuceneNetSpecificAttribute.cs       |    2 +-
 .../Support/Attributes/NoOpAttribute.cs            |    2 +-
 .../Support/ExceptionSerializationTestBase.cs      |   14 +-
 .../Support/JavaCompatibility/LuceneTestCase.cs    |   54 +-
 .../Support/JavaCompatibility/RandomHelpers.cs     |    2 +
 .../JavaCompatibility/SystemTypesHelpers.cs        |    2 +
 .../Support/Util/LuceneTestFrameworkInitializer.cs |    4 +-
 .../Util/Automaton/AutomatonTestUtil.cs            |    4 +-
 src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs |    8 +-
 src/Lucene.Net.TestFramework/Util/LineFileDocs.cs  |   26 +-
 .../Util/LuceneTestCase.cs                         |   90 +-
 .../Util/TestRuleSetupAndRestoreClassEnv.cs        |   23 +-
 src/Lucene.Net.TestFramework/Util/TestUtil.cs      |   64 +-
 .../Util/ThrottledIndexOutput.cs                   |   12 +-
 .../Compound/TestCompoundWordTokenFilter.cs        |   83 +-
 .../Analysis/Core/TestAllAnalyzersHaveFactories.cs |   12 +-
 .../Analysis/Core/TestAnalyzers.cs                 |    7 +-
 .../Analysis/Core/TestClassicAnalyzer.cs           |   96 +-
 .../Analysis/Core/TestFactories.cs                 |   14 +-
 .../Analysis/Core/TestRandomChains.cs              |   46 +-
 .../Analysis/Hunspell/TestAllDictionaries.cs       |   69 +-
 .../Analysis/Hunspell/TestAllDictionaries2.cs      |   69 +-
 .../Analysis/Hunspell/TestDictionary.cs            |  166 +-
 .../Util/BaseTokenStreamFactoryTestCase.cs         |    8 +-
 .../Analysis/Util/TestFilesystemResourceLoader.cs  |   10 +-
 .../Collation/TestCollationKeyAnalyzer.cs          |    7 +-
 .../Collation/TestCollationKeyFilter.cs            |    7 +-
 .../Collation/TestCollationKeyFilterFactory.cs     |    2 +-
 .../Analysis/Icu/Segmentation/TestICUTokenizer.cs  |   20 +-
 .../Analysis/Icu/TestICUFoldingFilter.cs           |    8 +-
 .../Analysis/Icu/TestICUNormalizer2CharFilter.cs   |   55 +-
 .../Analysis/Icu/TestICUNormalizer2Filter.cs       |   18 +-
 .../Analysis/Icu/TestICUTransformFilter.cs         |   16 +-
 .../Collation/TestICUCollationKeyFilterFactory.cs  |    2 +-
 .../TestSearchMode.cs                              |    2 +-
 .../Morfologik/TestMorfologikAnalyzer.cs           |   64 +-
 .../TestOpenNLPLemmatizerFilterFactory.cs          |    1 -
 .../TestOpenNLPSentenceBreakIterator.cs            |    4 +-
 .../Language/Bm/BeiderMorseEncoderTest.cs          |    2 +-
 .../Language/MatchRatingApproachEncoderTest.cs     |   24 +-
 .../Egothor.Stemmer/TestCompile.cs                 |   61 +-
 .../Pl/TestPolishAnalyzer.cs                       |   14 +-
 .../BenchmarkTestCase.cs                           |    2 +-
 .../ByTask/Feeds/TestHtmlParser.cs                 |   16 +-
 .../ByTask/Feeds/TrecContentSourceTest.cs          |    2 +-
 .../ByTask/Tasks/WriteLineDocTaskTest.cs           |    2 +-
 .../Facet/TestDistanceFacetsExample.cs             |   20 +-
 .../Facet/TestRangeFacetsExample.cs                |   20 +-
 src/Lucene.Net.Tests.Demo/TestDemo.cs              |    8 +-
 .../JS/TestCustomFunctions.cs                      |    2 +-
 .../SortedSet/TestSortedSetDocValuesFacets.cs      |    4 +-
 .../Taxonomy/Directory/TestAddTaxonomy.cs          |   36 +-
 .../Taxonomy/TestTaxonomyFacetCounts2.cs           |    2 +
 .../WriterCache/TestCompactLabelToOrdinal.cs       |    3 +-
 .../DistinctValuesCollectorTest.cs                 |   14 +-
 .../GroupFacetCollectorTest.cs                     |    5 +-
 .../Highlight/HighlighterTest.cs                   |   14 +-
 src/Lucene.Net.Tests.Join/TestJoinUtil.cs          |   54 +-
 .../Index/Sorter/SorterTestBase.cs                 |   28 +-
 .../Index/TestPKIndexSplitter.cs                   |   88 +-
 src/Lucene.Net.Tests.Queries/BooleanFilterTest.cs  |   24 +-
 .../CommonTermsQueryTest.cs                        |   12 +-
 .../TestCustomScoreQuery.cs                        |   28 +-
 .../Analyzing/TestAnalyzingQueryParser.cs          |    8 +-
 .../Classic/TestMultiFieldQueryParser.cs           |   51 +-
 .../Flexible/Standard/TestMultiFieldQPHelper.cs    |   17 +
 .../Flexible/Standard/TestNumericQueryParser.cs    |    3 +-
 .../Surround/Query/BooleanQueryTst.cs              |    4 +-
 .../Surround/Query/SingleFieldTestDb.cs            |   16 +-
 .../Util/QueryParserTestBase.cs                    |   28 +-
 src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs |    8 +-
 .../IndexAndTaxonomyReplicationClientTest.cs       |    2 +
 .../IndexAndTaxonomyRevisionTest.cs                |   34 +-
 .../IndexReplicationClientTest.cs                  |    2 +
 .../Spell/TestSpellChecker.cs                      |  427 +-
 .../Analyzing/AnalyzingInfixSuggesterTest.cs       |  322 +-
 .../Suggest/Analyzing/AnalyzingSuggesterTest.cs    |   16 +-
 .../Suggest/Fst/BytesRefSortersTest.cs             |    4 +-
 .../Suggest/Fst/LargeInputFST.cs                   |    2 +-
 .../Analysis/TestMockAnalyzer.cs                   |   12 +-
 .../Store/TestMockDirectoryWrapper.cs              |   16 +-
 .../Analysis/TestNumericTokenStream.cs             |   37 +-
 .../Codecs/Lucene3x/TestSurrogates.cs              |    6 +-
 .../Document/TestBinaryDocument.cs                 |   39 +-
 src/Lucene.Net.Tests/Document/TestDateTools.cs     |    2 +-
 src/Lucene.Net.Tests/Index/TestAddIndexes.cs       |   52 +-
 src/Lucene.Net.Tests/Index/TestBagOfPositions.cs   |    3 +-
 src/Lucene.Net.Tests/Index/TestBagOfPostings.cs    |    3 +-
 src/Lucene.Net.Tests/Index/TestCodecs.cs           |  236 +-
 .../Index/TestConcurrentMergeScheduler.cs          |   50 +-
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs      |   70 +-
 .../Index/TestIndexWriterWithThreads.cs            |   14 +-
 src/Lucene.Net.Tests/Index/TestPayloads.cs         |    3 +-
 src/Lucene.Net.Tests/Index/TestSegmentReader.cs    |    4 +-
 .../Index/TestTermVectorsReader.cs                 |    5 -
 src/Lucene.Net.Tests/Index/TestTermsEnum.cs        |  172 +-
 .../Search/FuzzyTermOnShortTermsTest.cs            |    2 +
 .../Search/TestElevationComparator.cs              |    3 +-
 .../Search/TestMultiTermConstantScore.cs           |   27 +-
 .../Search/TestPositionIncrement.cs                |    7 +-
 src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs   |    7 +-
 src/Lucene.Net.Tests/Search/TestShardSearching.cs  |    3 +-
 .../Store/TestBufferedIndexInput.cs                |   11 +-
 src/Lucene.Net.Tests/Store/TestDirectory.cs        |  152 +-
 .../Store/TestFileSwitchDirectory.cs               |   18 +-
 src/Lucene.Net.Tests/Store/TestHugeRamFile.cs      |    3 +-
 .../Store/TestNRTCachingDirectory.cs               |   20 +-
 src/Lucene.Net.Tests/Store/TestWindowsMMap.cs      |   36 +-
 .../Support/Index/TestTaskMergeScheduler.cs        |   50 +-
 src/Lucene.Net.Tests/Support/TestApiConsistency.cs |    2 +-
 src/Lucene.Net.Tests/Support/TestCase.cs           |    2 +-
 src/Lucene.Net.Tests/Support/TestSerialization.cs  |   18 +-
 src/Lucene.Net.Tests/TestDemo.cs                   |   63 +-
 src/Lucene.Net.Tests/TestExternalCodecs.cs         |  130 +-
 src/Lucene.Net.Tests/Util/TestMathUtil.cs          |    3 +-
 src/Lucene.Net.Tests/Util/TestNumericUtils.cs      |    4 +-
 src/Lucene.Net.Tests/Util/TestOfflineSorter.cs     |   14 +-
 src/Lucene.Net.Tests/Util/TestPriorityQueue.cs     |    2 +-
 src/Lucene.Net/Analysis/Analyzer.cs                |   15 +-
 src/Lucene.Net/Analysis/CachingTokenFilter.cs      |   24 +
 src/Lucene.Net/Analysis/CharFilter.cs              |    4 +-
 src/Lucene.Net/Analysis/Token.cs                   |   23 +-
 .../Analysis/TokenAttributes/CharTermAttribute.cs  |   10 +-
 .../Analysis/TokenAttributes/FlagsAttribute.cs     |    4 +-
 .../Analysis/TokenAttributes/ICharTermAttribute.cs |    3 +
 .../Analysis/TokenAttributes/OffsetAttribute.cs    |    3 +-
 .../Analysis/TokenAttributes/PayloadAttribute.cs   |    3 +-
 .../TokenAttributes/PositionIncrementAttribute.cs  |    3 +-
 .../TokenAttributes/PositionLengthAttribute.cs     |    3 +-
 .../Analysis/TokenAttributes/TypeAttribute.cs      |    3 +-
 src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs  |    8 +-
 src/Lucene.Net/Analysis/Tokenizer.cs               |    5 +-
 src/Lucene.Net/ChangeNotes.txt                     |    2 +-
 src/Lucene.Net/Codecs/BlockTreeTermsReader.cs      |   99 +-
 src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs      |   68 +-
 src/Lucene.Net/Codecs/Codec.cs                     |    4 +-
 src/Lucene.Net/Codecs/CodecUtil.cs                 |   14 +-
 .../Compressing/CompressingStoredFieldsFormat.cs   |    4 +
 .../CompressingStoredFieldsIndexReader.cs          |    4 +
 .../CompressingStoredFieldsIndexWriter.cs          |    5 +-
 .../Compressing/CompressingStoredFieldsReader.cs   |   17 +-
 .../Compressing/CompressingStoredFieldsWriter.cs   |   15 +-
 .../Compressing/CompressingTermVectorsFormat.cs    |    3 +
 .../Compressing/CompressingTermVectorsReader.cs    |   44 +-
 .../Compressing/CompressingTermVectorsWriter.cs    |   14 +-
 .../Codecs/Compressing/CompressionMode.cs          |   50 +-
 src/Lucene.Net/Codecs/Compressing/LZ4.cs           |   38 +-
 src/Lucene.Net/Codecs/DocValuesConsumer.cs         |   11 +-
 src/Lucene.Net/Codecs/DocValuesFormat.cs           |    4 +-
 src/Lucene.Net/Codecs/DocValuesProducer.cs         |    2 +-
 src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs   |   66 +-
 .../Codecs/Lucene3x/Lucene3xNormsFormat.cs         |    2 +
 .../Codecs/Lucene3x/Lucene3xNormsProducer.cs       |   18 +-
 .../Codecs/Lucene3x/Lucene3xPostingsFormat.cs      |    2 +
 .../Codecs/Lucene3x/Lucene3xSegmentInfoFormat.cs   |    6 +-
 .../Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs   |    8 +-
 .../Codecs/Lucene3x/Lucene3xSkipListReader.cs      |    9 +-
 .../Codecs/Lucene3x/Lucene3xStoredFieldsFormat.cs  |    2 +
 .../Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs  |   26 +-
 .../Codecs/Lucene3x/Lucene3xTermVectorsReader.cs   |   87 +-
 src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs  |   10 +-
 src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs  |   22 +-
 .../Codecs/Lucene3x/SegmentTermPositions.cs        |   16 +-
 src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs       |   19 +-
 src/Lucene.Net/Codecs/Lucene3x/TermInfo.cs         |    3 +
 src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs  |   20 +-
 .../Codecs/Lucene3x/TermInfosReaderIndex.cs        |   11 +-
 src/Lucene.Net/Codecs/Lucene40/BitVector.cs        |   27 +-
 src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs    |    3 +
 .../Codecs/Lucene40/Lucene40DocValuesFormat.cs     |    2 +
 .../Codecs/Lucene40/Lucene40DocValuesReader.cs     |   68 +-
 .../Codecs/Lucene40/Lucene40FieldInfosReader.cs    |    3 +
 .../Codecs/Lucene40/Lucene40LiveDocsFormat.cs      |   11 +-
 .../Codecs/Lucene40/Lucene40NormsFormat.cs         |    2 +
 .../Codecs/Lucene40/Lucene40PostingsBaseFormat.cs  |    2 +
 .../Codecs/Lucene40/Lucene40PostingsFormat.cs      |    5 +-
 .../Codecs/Lucene40/Lucene40PostingsReader.cs      |  100 +-
 .../Codecs/Lucene40/Lucene40SegmentInfoFormat.cs   |    8 +-
 .../Codecs/Lucene40/Lucene40SkipListReader.cs      |   11 +-
 .../Codecs/Lucene40/Lucene40StoredFieldsFormat.cs  |    4 +
 .../Codecs/Lucene40/Lucene40StoredFieldsReader.cs  |   15 +-
 .../Codecs/Lucene40/Lucene40StoredFieldsWriter.cs  |   16 +-
 .../Codecs/Lucene40/Lucene40TermVectorsFormat.cs   |    4 +
 .../Codecs/Lucene40/Lucene40TermVectorsReader.cs   |   92 +-
 .../Codecs/Lucene40/Lucene40TermVectorsWriter.cs   |   19 +-
 src/Lucene.Net/Codecs/Lucene41/ForUtil.cs          |    8 +-
 src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs    |    3 +
 .../Codecs/Lucene41/Lucene41PostingsBaseFormat.cs  |    4 +
 .../Codecs/Lucene41/Lucene41PostingsReader.cs      |   60 +-
 .../Codecs/Lucene41/Lucene41PostingsWriter.cs      |    6 +
 .../Codecs/Lucene41/Lucene41SkipReader.cs          |   13 +-
 .../Codecs/Lucene41/Lucene41SkipWriter.cs          |   12 +-
 src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs    |    9 +-
 .../Codecs/Lucene42/Lucene42DocValuesFormat.cs     |    2 +
 .../Codecs/Lucene42/Lucene42DocValuesProducer.cs   |   29 +-
 .../Codecs/Lucene42/Lucene42NormsConsumer.cs       |    5 +-
 .../Codecs/Lucene42/Lucene42NormsFormat.cs         |    4 +
 src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs    |    5 +
 .../Codecs/Lucene45/Lucene45DocValuesConsumer.cs   |   17 +-
 .../Codecs/Lucene45/Lucene45DocValuesFormat.cs     |    4 +
 .../Codecs/Lucene45/Lucene45DocValuesProducer.cs   |  151 +-
 src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs    |    6 +
 .../Codecs/MappingMultiDocsAndPositionsEnum.cs     |    5 +
 src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs      |    3 +-
 src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs  |   23 +-
 src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs  |    8 +-
 .../Codecs/PerField/PerFieldDocValuesFormat.cs     |   42 +-
 .../Codecs/PerField/PerFieldPostingsFormat.cs      |   35 +-
 src/Lucene.Net/Codecs/PostingsFormat.cs            |    4 +-
 src/Lucene.Net/Document/CompressionTools.cs        |   14 +-
 src/Lucene.Net/Document/DateTools.cs               |   14 +-
 .../Document/DerefBytesDocValuesField.cs           |    2 +
 src/Lucene.Net/Document/Field.cs                   |    9 +-
 .../Document/SortedBytesDocValuesField.cs          |    2 +
 .../Document/StraightBytesDocValuesField.cs        |    2 +
 src/Lucene.Net/Index/AtomicReader.cs               |    9 +-
 .../Index/BinaryDocValuesFieldUpdates.cs           |   17 +-
 src/Lucene.Net/Index/BufferedUpdates.cs            |   10 +-
 src/Lucene.Net/Index/CheckIndex.cs                 |   30 +-
 src/Lucene.Net/Index/CompositeReaderContext.cs     |    3 +-
 src/Lucene.Net/Index/ConcurrentMergeScheduler.cs   |    2 +-
 src/Lucene.Net/Index/DirectoryReader.cs            |    4 +-
 src/Lucene.Net/Index/DocFieldProcessor.cs          |    4 +-
 src/Lucene.Net/Index/DocTermOrds.cs                |   23 +-
 src/Lucene.Net/Index/DocValues.cs                  |   16 +-
 src/Lucene.Net/Index/DocValuesProcessor.cs         |   47 +-
 src/Lucene.Net/Index/DocValuesUpdate.cs            |    2 +-
 .../Index/DocumentsWriterFlushControl.cs           |   16 +-
 src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs  |    2 +-
 src/Lucene.Net/Index/DocumentsWriterPerThread.cs   |    7 +-
 .../Index/DocumentsWriterPerThreadPool.cs          |   26 +-
 src/Lucene.Net/Index/FieldInfo.cs                  |    6 +-
 src/Lucene.Net/Index/FieldInfos.cs                 |   33 +-
 src/Lucene.Net/Index/FilterAtomicReader.cs         |    4 +-
 src/Lucene.Net/Index/FilterDirectoryReader.cs      |    6 +-
 src/Lucene.Net/Index/FilteredTermsEnum.cs          |    4 +-
 src/Lucene.Net/Index/FreqProxTermsWriter.cs        |    2 +-
 .../Index/FreqProxTermsWriterPerField.cs           |    4 +-
 src/Lucene.Net/Index/FrozenBufferedUpdates.cs      |    3 +-
 src/Lucene.Net/Index/IndexCommit.cs                |    3 +-
 src/Lucene.Net/Index/IndexFileDeleter.cs           |   18 +-
 src/Lucene.Net/Index/IndexReader.cs                |    2 +
 src/Lucene.Net/Index/IndexUpgrader.cs              |    2 +-
 src/Lucene.Net/Index/IndexWriter.cs                |   79 +-
 src/Lucene.Net/Index/IndexWriterConfig.cs          |    5 +-
 src/Lucene.Net/Index/LogMergePolicy.cs             |   35 +-
 src/Lucene.Net/Index/MergePolicy.cs                |   13 +-
 src/Lucene.Net/Index/MergeState.cs                 |    8 +-
 src/Lucene.Net/Index/MultiDocValues.cs             |   20 +-
 src/Lucene.Net/Index/MultiFields.cs                |    3 +-
 src/Lucene.Net/Index/MultiTermsEnum.cs             |   54 +-
 src/Lucene.Net/Index/NormsConsumer.cs              |    2 +-
 src/Lucene.Net/Index/NormsConsumerPerField.cs      |    2 +-
 .../Index/NumericDocValuesFieldUpdates.cs          |   13 +-
 src/Lucene.Net/Index/NumericDocValuesWriter.cs     |    2 +-
 src/Lucene.Net/Index/ParallelAtomicReader.cs       |   36 +-
 src/Lucene.Net/Index/ParallelPostingsArray.cs      |    2 +-
 .../Index/PersistentSnapshotDeletionPolicy.cs      |    8 +-
 src/Lucene.Net/Index/PrefixCodedTerms.cs           |   27 +-
 src/Lucene.Net/Index/ReadersAndUpdates.cs          |    8 +-
 src/Lucene.Net/Index/SegmentCoreReaders.cs         |    5 +-
 src/Lucene.Net/Index/SegmentDocValues.cs           |    5 +-
 src/Lucene.Net/Index/SegmentInfo.cs                |   10 +-
 src/Lucene.Net/Index/SegmentInfos.cs               |   20 +-
 src/Lucene.Net/Index/SegmentMerger.cs              |    3 +-
 src/Lucene.Net/Index/SegmentReader.cs              |   39 +-
 src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs |    9 +-
 src/Lucene.Net/Index/SnapshotDeletionPolicy.cs     |    5 +-
 src/Lucene.Net/Index/SortedDocValuesWriter.cs      |    2 +-
 src/Lucene.Net/Index/SortedSetDocValuesWriter.cs   |    4 +-
 src/Lucene.Net/Index/StandardDirectoryReader.cs    |   12 +-
 .../Index/TermVectorsConsumerPerField.cs           |    2 +-
 src/Lucene.Net/Index/Terms.cs                      |    9 +-
 src/Lucene.Net/Index/TermsEnum.cs                  |    9 +-
 src/Lucene.Net/Index/TermsHashPerField.cs          |    2 +-
 src/Lucene.Net/Index/TieredMergePolicy.cs          |   16 +-
 src/Lucene.Net/Index/TwoPhaseCommitTool.cs         |    5 +-
 src/Lucene.Net/Search/BooleanScorer.cs             |   18 +-
 src/Lucene.Net/Search/BooleanScorer2.cs            |   31 +-
 src/Lucene.Net/Search/CachingCollector.cs          |    2 +-
 src/Lucene.Net/Search/CachingWrapperFilter.cs      |    7 +-
 src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs  |    7 +-
 src/Lucene.Net/Search/ConstantScoreQuery.cs        |   14 +-
 .../Search/ControlledRealTimeReopenThread.cs       |   56 +-
 src/Lucene.Net/Search/FieldCache.cs                |    5 +-
 src/Lucene.Net/Search/FieldCacheImpl.cs            |   48 +-
 src/Lucene.Net/Search/FieldComparator.cs           |   10 +-
 src/Lucene.Net/Search/FieldValueFilter.cs          |    4 +-
 src/Lucene.Net/Search/FieldValueHitQueue.cs        |    2 +-
 src/Lucene.Net/Search/FilteredDocIdSet.cs          |    4 +-
 src/Lucene.Net/Search/FilteredDocIdSetIterator.cs  |    2 +-
 src/Lucene.Net/Search/FilteredQuery.cs             |    4 +-
 src/Lucene.Net/Search/FuzzyTermsEnum.cs            |   18 +-
 src/Lucene.Net/Search/IndexSearcher.cs             |    6 +-
 src/Lucene.Net/Search/LiveFieldValues.cs           |   39 +-
 src/Lucene.Net/Search/MatchAllDocsQuery.cs         |   11 +-
 src/Lucene.Net/Search/MultiPhraseQuery.cs          |   27 +-
 src/Lucene.Net/Search/MultiTermQuery.cs            |    2 +-
 src/Lucene.Net/Search/NumericRangeQuery.cs         |    3 +-
 .../Search/Payloads/AveragePayloadFunction.cs      |    6 +-
 src/Lucene.Net/Search/Payloads/PayloadNearQuery.cs |    8 +-
 src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs  |   35 +-
 src/Lucene.Net/Search/Payloads/PayloadTermQuery.cs |    2 +-
 src/Lucene.Net/Search/PhraseQuery.cs               |    4 +-
 src/Lucene.Net/Search/QueryRescorer.cs             |    4 +-
 src/Lucene.Net/Search/QueryWrapperFilter.cs        |   13 +-
 src/Lucene.Net/Search/ReferenceManager.cs          |   27 +-
 src/Lucene.Net/Search/ReqOptSumScorer.cs           |   11 +-
 src/Lucene.Net/Search/ScoringRewrite.cs            |    7 +-
 src/Lucene.Net/Search/SearcherLifetimeManager.cs   |   51 +-
 src/Lucene.Net/Search/Similarities/AfterEffect.cs  |    2 +-
 src/Lucene.Net/Search/Similarities/BasicModel.cs   |    2 +-
 src/Lucene.Net/Search/Similarities/Distribution.cs |    2 +-
 src/Lucene.Net/Search/Similarities/LMSimilarity.cs |    4 +-
 src/Lucene.Net/Search/Similarities/Lambda.cs       |    2 +-
 .../Search/Similarities/Normalization.cs           |    2 +-
 .../Similarities/PerFieldSimilarityWrapper.cs      |    2 +-
 src/Lucene.Net/Search/Similarities/Similarity.cs   |    6 +-
 .../Search/Similarities/SimilarityBase.cs          |    6 +-
 .../Search/Similarities/TFIDFSimilarity.cs         |    2 +-
 src/Lucene.Net/Search/SloppyPhraseScorer.cs        |   28 +-
 src/Lucene.Net/Search/SortField.cs                 |    6 +-
 .../Search/Spans/FieldMaskingSpanQuery.cs          |    2 +-
 src/Lucene.Net/Search/Spans/NearSpansOrdered.cs    |    6 +-
 src/Lucene.Net/Search/Spans/NearSpansUnordered.cs  |   21 +-
 .../Search/Spans/SpanMultiTermQueryWrapper.cs      |   16 +-
 src/Lucene.Net/Search/Spans/SpanNearQuery.cs       |    2 +-
 src/Lucene.Net/Search/Spans/SpanNotQuery.cs        |   11 +-
 src/Lucene.Net/Search/Spans/SpanOrQuery.cs         |   13 +-
 .../Search/Spans/SpanPositionCheckQuery.cs         |    4 +-
 src/Lucene.Net/Search/Spans/SpanTermQuery.cs       |    4 +-
 src/Lucene.Net/Search/TermQuery.cs                 |    2 +-
 src/Lucene.Net/Search/TermRangeQuery.cs            |    8 +-
 src/Lucene.Net/Search/TimeLimitingCollector.cs     |    8 +-
 src/Lucene.Net/Search/TopDocsCollector.cs          |    3 +-
 src/Lucene.Net/Search/TopTermsRewrite.cs           |   13 +-
 src/Lucene.Net/Store/BufferedIndexInput.cs         |   20 +-
 src/Lucene.Net/Store/BufferedIndexOutput.cs        |    6 +-
 src/Lucene.Net/Store/ByteBufferIndexInput.cs       |    2 +-
 src/Lucene.Net/Store/CompoundFileDirectory.cs      |   10 +-
 src/Lucene.Net/Store/CompoundFileWriter.cs         |   12 +-
 src/Lucene.Net/Store/Directory.cs                  |   16 +-
 src/Lucene.Net/Store/FSDirectory.cs                |    7 +-
 src/Lucene.Net/Store/Lock.cs                       |   13 +-
 src/Lucene.Net/Store/LockStressTest.cs             |   95 +-
 src/Lucene.Net/Store/LockVerifyServer.cs           |  196 +-
 src/Lucene.Net/Store/MMapDirectory.cs              |    4 +-
 src/Lucene.Net/Store/NRTCachingDirectory.cs        |   12 +-
 src/Lucene.Net/Store/NativeFSLockFactory.cs        |   30 +-
 src/Lucene.Net/Store/NoLockFactory.cs              |    4 +-
 src/Lucene.Net/Store/OutputStreamDataOutput.cs     |   24 +-
 src/Lucene.Net/Store/RAMDirectory.cs               |   16 +-
 src/Lucene.Net/Store/RAMInputStream.cs             |    4 +-
 src/Lucene.Net/Store/RAMOutputStream.cs            |    4 +-
 .../Store/RateLimitedDirectoryWrapper.cs           |    6 +-
 src/Lucene.Net/Store/RateLimitedIndexOutput.cs     |    4 +-
 src/Lucene.Net/Store/SimpleFSLockFactory.cs        |    4 +-
 src/Lucene.Net/Store/SingleInstanceLockFactory.cs  |    4 +-
 src/Lucene.Net/Support/AssemblyUtils.cs            |    2 +-
 src/Lucene.Net/Support/CRC32.cs                    |   16 +-
 .../Codecs/ExcludeCodecFromScanAttribute.cs        |    2 +-
 .../ExcludeDocValuesFormatFromScanAttribute.cs     |    2 +-
 .../ExcludePostingsFormatFromScanAttribute.cs      |    2 +-
 src/Lucene.Net/Support/Collections.cs              |   78 +-
 .../Support/Compatibility/NullableAttributes.cs    |    2 +
 .../Support/ConcurrentDictionaryWrapper.cs         |    5 +-
 src/Lucene.Net/Support/ConcurrentSet.cs            |   15 +-
 .../Configuration/Base/ConfigurationReloadToken.cs |    2 +-
 .../Configuration/Base/ConfigurationRoot.cs        |    2 +-
 .../EnvironmentVariablesConfigurationProvider.cs   |    2 +-
 .../Extensions/IndexableFieldExtensions.cs         |   12 +-
 src/Lucene.Net/Support/EnumerableExtensions.cs     |   20 +-
 .../ExceptionToClassNameConventionAttribute.cs     |    2 +-
 .../ExceptionToNetNumericConventionAttribute.cs    |    2 +-
 .../ExceptionToNullableEnumConventionAttribute.cs  |    2 +-
 src/Lucene.Net/Support/IO/FileSupport.cs           |   13 +-
 src/Lucene.Net/Support/IO/SafeTextWriterWrapper.cs |   31 +-
 src/Lucene.Net/Support/IdentityWeakReference.cs    |   10 +-
 .../Extensions/IndexWriterConfigExtensions.cs      |    3 +-
 src/Lucene.Net/Support/Index/TaskMergeScheduler.cs |   33 +-
 .../Threading/ReaderWriterLockSlimExtensions.cs    |    4 +-
 .../Support/Util/BundleResourceManagerFactory.cs   |    3 +-
 src/Lucene.Net/Support/Util/ExceptionExtensions.cs |    3 +-
 src/Lucene.Net/Support/Util/ListExtensions.cs      |   10 +-
 src/Lucene.Net/Support/Util/NamedServiceFactory.cs |   15 +-
 src/Lucene.Net/Support/Util/NumberFormat.cs        |   24 +-
 .../Support/Util/ServiceNameAttribute.cs           |    2 +-
 src/Lucene.Net/Support/WeakDictionary.cs           |    2 +-
 src/Lucene.Net/Support/WritableArrayAttribute.cs   |    2 +-
 src/Lucene.Net/Util/ArrayInPlaceMergeSorter.cs     |    3 +
 src/Lucene.Net/Util/ArrayIntroSorter.cs            |    9 +-
 src/Lucene.Net/Util/ArrayTimSorter.cs              |    7 +
 src/Lucene.Net/Util/ArrayUtil.cs                   |   19 +
 src/Lucene.Net/Util/AttributeImpl.cs               |    8 +-
 src/Lucene.Net/Util/AttributeSource.cs             |   37 +-
 src/Lucene.Net/Util/Automaton/Automaton.cs         |   27 +
 src/Lucene.Net/Util/Automaton/BasicAutomata.cs     |   10 +
 src/Lucene.Net/Util/Automaton/BasicOperations.cs   |   13 +
 src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs |    2 +
 .../Util/Automaton/DaciukMihovAutomatonBuilder.cs  |   11 +-
 .../Util/Automaton/LevenshteinAutomata.cs          |    5 +
 src/Lucene.Net/Util/Automaton/RegExp.cs            |   38 +
 src/Lucene.Net/Util/Automaton/RunAutomaton.cs      |    4 +
 src/Lucene.Net/Util/Automaton/SortedIntSet.cs      |    8 +
 src/Lucene.Net/Util/Automaton/SpecialOperations.cs |    5 +
 src/Lucene.Net/Util/Automaton/State.cs             |    4 +
 src/Lucene.Net/Util/Automaton/StatePair.cs         |    3 +
 src/Lucene.Net/Util/Automaton/Transition.cs        |    5 +
 src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs       |   10 +
 src/Lucene.Net/Util/BitUtil.cs                     |   10 +
 src/Lucene.Net/Util/Bits.cs                        |    3 +
 src/Lucene.Net/Util/BroadWord.cs                   |    6 +-
 src/Lucene.Net/Util/ByteBlockPool.cs               |    6 +
 src/Lucene.Net/Util/BytesRef.cs                    |   11 +
 src/Lucene.Net/Util/BytesRefArray.cs               |   11 +-
 src/Lucene.Net/Util/BytesRefHash.cs                |   25 +-
 src/Lucene.Net/Util/CharsRef.cs                    |   12 +-
 src/Lucene.Net/Util/CloseableThreadLocal.cs        |    5 +
 src/Lucene.Net/Util/CollectionUtil.cs              |   11 +
 src/Lucene.Net/Util/CommandLineUtil.cs             |    4 +
 src/Lucene.Net/Util/Constants.cs                   |   26 +-
 src/Lucene.Net/Util/Counter.cs                     |    5 +
 src/Lucene.Net/Util/DocIdBitSet.cs                 |    3 +
 src/Lucene.Net/Util/DoubleBarrelLRUCache.cs        |    3 +-
 src/Lucene.Net/Util/FieldCacheSanityChecker.cs     |   16 +-
 src/Lucene.Net/Util/FilterIterator.cs              |   11 +-
 src/Lucene.Net/Util/FixedBitSet.cs                 |   27 +-
 src/Lucene.Net/Util/Fst/Builder.cs                 |    5 +-
 src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs     |    2 +
 src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs         |    2 +
 src/Lucene.Net/Util/Fst/BytesStore.cs              |    8 +
 src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs     |    2 +
 src/Lucene.Net/Util/Fst/FST.cs                     |   15 +
 src/Lucene.Net/Util/Fst/FSTEnum.cs                 |    2 +
 src/Lucene.Net/Util/Fst/ForwardBytesReader.cs      |    4 +
 src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs      |    2 +
 src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs          |    2 +
 src/Lucene.Net/Util/Fst/NoOutputs.cs               |    8 +
 src/Lucene.Net/Util/Fst/NodeHash.cs                |    8 +-
 src/Lucene.Net/Util/Fst/Outputs.cs                 |    2 +
 src/Lucene.Net/Util/Fst/PairOutputs.cs             |    4 +
 src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs      |    5 +
 src/Lucene.Net/Util/Fst/ReverseBytesReader.cs      |    5 +
 src/Lucene.Net/Util/Fst/Util.cs                    |    4 +
 src/Lucene.Net/Util/GrowableByteArrayDataOutput.cs |    3 +
 src/Lucene.Net/Util/IOUtils.cs                     |    9 +
 src/Lucene.Net/Util/InPlaceMergeSorter.cs          |    5 +-
 src/Lucene.Net/Util/IndexableBinaryStringTools.cs  |   10 +
 src/Lucene.Net/Util/IntBlockPool.cs                |    8 +-
 src/Lucene.Net/Util/IntroSorter.cs                 |    5 +-
 src/Lucene.Net/Util/IntsRef.cs                     |   12 +-
 src/Lucene.Net/Util/LongBitSet.cs                  |    4 +
 src/Lucene.Net/Util/LongValues.cs                  |    3 +
 src/Lucene.Net/Util/LongsRef.cs                    |   16 +-
 src/Lucene.Net/Util/MergedIterator.cs              |    4 +-
 src/Lucene.Net/Util/Mutable/MutableValueBool.cs    |    7 +
 src/Lucene.Net/Util/Mutable/MutableValueDate.cs    |    3 +
 src/Lucene.Net/Util/Mutable/MutableValueDouble.cs  |    7 +-
 src/Lucene.Net/Util/Mutable/MutableValueFloat.cs   |    7 +
 src/Lucene.Net/Util/Mutable/MutableValueInt.cs     |    7 +
 src/Lucene.Net/Util/Mutable/MutableValueLong.cs    |    7 +
 src/Lucene.Net/Util/Mutable/MutableValueStr.cs     |    7 +
 src/Lucene.Net/Util/NumericUtils.cs                |    3 +
 src/Lucene.Net/Util/OfflineSorter.cs               |    5 +
 src/Lucene.Net/Util/OpenBitSet.cs                  |   10 +
 src/Lucene.Net/Util/OpenBitSetIterator.cs          |    3 +
 src/Lucene.Net/Util/PForDeltaDocIdSet.cs           |   15 +
 .../Util/Packed/AbstractAppendingLongBuffer.cs     |    3 +
 .../Util/Packed/AbstractBlockPackedWriter.cs       |    4 +
 src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs |    5 +
 .../Util/Packed/AppendingDeltaPackedLongBuffer.cs  |    4 +
 .../Util/Packed/BlockPackedReaderIterator.cs       |    4 +
 src/Lucene.Net/Util/Packed/Direct16.cs             |    6 +
 src/Lucene.Net/Util/Packed/Direct32.cs             |    6 +
 src/Lucene.Net/Util/Packed/Direct64.cs             |    6 +
 src/Lucene.Net/Util/Packed/Direct8.cs              |    8 +-
 .../Util/Packed/DirectPacked64SingleBlockReader.cs |    2 +
 src/Lucene.Net/Util/Packed/DirectPackedReader.cs   |    2 +
 src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs     |    8 +
 src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs    |    8 +
 src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs     |    6 +
 src/Lucene.Net/Util/Packed/GrowableWriter.cs       |    9 +
 .../Util/Packed/MonotonicAppendingLongBuffer.cs    |    6 +
 .../Util/Packed/MonotonicBlockPackedWriter.cs      |    1 +
 src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs  |    3 +
 src/Lucene.Net/Util/Packed/Packed64.cs             |    2 +
 src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs  |    4 +
 src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs   |    5 +-
 src/Lucene.Net/Util/Packed/PackedDataInput.cs      |    3 +-
 src/Lucene.Net/Util/Packed/PackedInts.cs           |   25 +-
 src/Lucene.Net/Util/Packed/PackedReaderIterator.cs |    2 +
 src/Lucene.Net/Util/Packed/PackedWriter.cs         |    1 -
 src/Lucene.Net/Util/Packed/PagedGrowableWriter.cs  |    5 +
 src/Lucene.Net/Util/Packed/PagedMutable.cs         |    4 +
 src/Lucene.Net/Util/PagedBytes.cs                  |    7 +-
 src/Lucene.Net/Util/PrintStreamInfoStream.cs       |    4 +-
 src/Lucene.Net/Util/PriorityQueue.cs               |    4 +
 src/Lucene.Net/Util/QueryBuilder.cs                |   13 +-
 src/Lucene.Net/Util/RamUsageEstimator.cs           |   24 +-
 src/Lucene.Net/Util/RefCount.cs                    |    4 +
 src/Lucene.Net/Util/RollingBuffer.cs               |    2 +
 src/Lucene.Net/Util/SPIClassIterator.cs            |    6 +-
 src/Lucene.Net/Util/SentinelIntSet.cs              |    4 +
 src/Lucene.Net/Util/SetOnce.cs                     |    3 +
 src/Lucene.Net/Util/SmallFloat.cs                  |    7 +
 src/Lucene.Net/Util/Sorter.cs                      |   18 +
 src/Lucene.Net/Util/StringHelper.cs                |    7 +
 src/Lucene.Net/Util/TimSorter.cs                   |   18 +
 src/Lucene.Net/Util/VirtualMethod.cs               |    4 +-
 src/Lucene.Net/Util/WAH8DocIdSet.cs                |   14 +-
 src/Lucene.Net/Util/WeakIdentityMap.cs             |    2 +-
 .../Helpers/DiagnosticResult.cs                    |    6 +-
 ...00_SealIncrementTokenMethodCSCodeFixProvider.cs |    2 +-
 .../Verifiers/CodeFixVerifier.cs                   |    2 +-
 .../Verifiers/DiagnosticVerifier.cs                |    2 +-
 .../Support/JDKBreakIterator.cs                    |   18 +-
 .../Support/TestJDKBreakIterator.cs                |    6 +-
 .../Commands/CommandTestCase.cs                    |    6 +-
 .../Commands/Index/IndexFixCommandTest.cs          |    5 +-
 .../Commands/Index/IndexSplitCommandTest.cs        |    5 +-
 .../SourceCode/SourceCodeParserTest.cs             |   64 +-
 src/dotnet/tools/lucene-cli/CommandLineOptions.cs  |    2 +
 src/dotnet/tools/lucene-cli/ConfigurationBase.cs   |    2 +-
 src/dotnet/tools/lucene-cli/Program.cs             |    2 +-
 .../lucene-cli/SourceCode/SourceCodeExporter.cs    |    8 +-
 .../SourceCode/SourceCodeSectionParser.cs          |   14 +-
 .../SourceCode/SourceCodeSectionReader.cs          |    4 +-
 .../benchmark-sample/BenchmarkSampleCommand.cs     |   11 +-
 .../lucene-cli/commands/demo/DemoConfiguration.cs  |    6 +-
 .../DemoAssociationsFacetsCommand.cs               |   11 +-
 .../DemoDistanceFacetsCommand.cs                   |   11 +-
 .../DemoExpressionAggregationFacetsCommand.cs      |   11 +-
 .../demo/demo-index-files/DemoIndexFilesCommand.cs |    8 +-
 .../DemoMultiCategoryListsFacetsCommand.cs         |   11 +-
 .../demo-range-facets/DemoRangeFacetsCommand.cs    |   11 +-
 .../demo-search-files/DemoSearchFilesCommand.cs    |    8 +-
 .../demo-simple-facets/DemoSimpleFacetsCommand.cs  |   11 +-
 .../DemoSimpleSortedSetFacetsCommand.cs            |   11 +-
 .../index/index-split/IndexSplitCommand.cs         |    8 +-
 src/dotnet/tools/lucene-cli/lucene-cli.csproj      |    5 +
 1198 files changed, 12134 insertions(+), 12980 deletions(-)

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..bf63574
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,175 @@
+# You can modify the rules from these initially generated values to suit your own policies
+# You can learn more about editorconfig here: https://docs.microsoft.com/en-us/visualstudio/ide/editorconfig-code-style-settings-reference
+
+[*]
+charset = utf-8
+trim_trailing_whitespace = true
+
+[*.md]
+indent_style = space
+indent_size = 4
+
+# C# files
+[*.cs]
+
+#### Core EditorConfig Options ####
+
+#Formatting - indentation
+
+#use soft tabs (spaces) for indentation
+indent_style = space
+
+#Formatting - indentation options
+
+#indent switch case contents.
+csharp_indent_case_contents = true
+#indent switch labels
+csharp_indent_switch_labels = true
+
+#Formatting - new line options
+
+#place catch statements on a new line
+csharp_new_line_before_catch = true
+#place else statements on a new line
+csharp_new_line_before_else = true
+#require finally statements to be on a new line after the closing brace
+csharp_new_line_before_finally = true
+#require members of object initializers to be on the same line
+csharp_new_line_before_members_in_object_initializers = false
+#require braces to be on a new line for control_blocks, types, properties, and methods (also known as "Allman" style)
+csharp_new_line_before_open_brace = control_blocks, types, properties, methods
+
+#Formatting - organize using options
+
+#do not place System.* using directives before other using directives
+dotnet_sort_system_directives_first = false
+
+#Formatting - spacing options
+
+#require NO space between a cast and the value
+csharp_space_after_cast = false
+#require a space after the colon for bases or interfaces in a type declaration
+csharp_space_after_colon_in_inheritance_clause = true
+#require a space after a keyword in a control flow statement such as a for loop
+csharp_space_after_keywords_in_control_flow_statements = true
+#require a space before the colon for bases or interfaces in a type declaration
+csharp_space_before_colon_in_inheritance_clause = true
+#remove space within empty argument list parentheses
+csharp_space_between_method_call_empty_parameter_list_parentheses = false
+#remove space between method call name and opening parenthesis
+csharp_space_between_method_call_name_and_opening_parenthesis = false
+#do not place space characters after the opening parenthesis and before the closing parenthesis of a method call
+csharp_space_between_method_call_parameter_list_parentheses = false
+#remove space within empty parameter list parentheses for a method declaration
+csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
+#do not place space characters after the opening parenthesis and before the closing parenthesis of a method declaration parameter list
+csharp_space_between_method_declaration_parameter_list_parentheses = false
+
+#Formatting - wrapping options
+
+#leave code block on separate lines
+csharp_preserve_single_line_blocks = false
+#leave statements and member declarations on the same line
+csharp_preserve_single_line_statements = true
+
+#Style - Code block preferences
+
+#prefer curly braces when a code block spans multiple lines
+csharp_prefer_braces = when_multiline:silent
+
+#Style - expression bodied member options
+
+#prefer expression-bodied members for accessors
+csharp_style_expression_bodied_accessors = when_on_single_line:suggestion
+#prefer block bodies for constructors
+csharp_style_expression_bodied_constructors = false:suggestion
+#prefer expression-bodied members for indexers
+csharp_style_expression_bodied_indexers = true:suggestion
+#prefer expression-bodied members for methods when they fit on a single line
+csharp_style_expression_bodied_methods = when_on_single_line:silent
+#prefer expression-bodied members for properties
+csharp_style_expression_bodied_properties = when_on_single_line:suggestion
+
+#Style - expression level options
+
+#prefer out variables to be declared inline in the argument list of a method call when possible
+csharp_style_inlined_variable_declaration = true:suggestion
+#prefer the language keyword for member access expressions, instead of the type name, for types that have a keyword to represent them
+dotnet_style_predefined_type_for_member_access = true:suggestion
+
+#Style - Expression-level preferences
+
+#prefer objects to not be initialized using object initializers, but do not warn
+dotnet_style_object_initializer = true:silent
+#prefer objects to use auto properties, but turn off the warnings (we want to keep backing fields from Java for the most part)
+dotnet_style_prefer_auto_properties = true:silent
+
+#Style - implicit and explicit types
+
+#prefer explicit type over var in all cases, unless overridden by another code style rule
+csharp_style_var_elsewhere = false:silent
+#prefer explicit type over var to declare variables with built-in system types such as int
+csharp_style_var_for_built_in_types = false:silent
+#prefer explicit type over var when the type is already mentioned on the right-hand side of a declaration
+csharp_style_var_when_type_is_apparent = false:silent
+
+#Style - language keyword and framework type options
+
+#prefer the language keyword for local variables, method parameters, and class members, instead of the type name, for types that have a keyword to represent them
+dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
+
+#Style - modifier options
+
+#prefer accessibility modifiers to be declared except for public interface members. This currently behaves the same as 'always' and is future-proofing in case C# adds default interface methods.
+dotnet_style_require_accessibility_modifiers = for_non_interface_members:suggestion
+
+#Style - Modifier preferences
+
+#when this rule is set to a list of modifiers, prefer the specified ordering.
+csharp_preferred_modifier_order = public,private,protected,internal,virtual,readonly,override,static,abstract,new,sealed,volatile:silent
+
+#Style - Pattern matching
+
+#prefer pattern matching instead of is expression with type casts
+csharp_style_pattern_matching_over_as_with_null_check = true:suggestion
+
+#Style - Pattern matching preferences
+
+#prefer the traditional switch statement over the switch expression
+csharp_style_prefer_switch_expression = false:suggestion
+
+#Style - qualification options
+
+#prefer fields not to be prefaced with this. (C#) or Me. (Visual Basic)
+dotnet_style_qualification_for_field = false:none
+#prefer methods not to be prefaced with this. (C#) or Me. (Visual Basic)
+dotnet_style_qualification_for_method = false:none
+#prefer properties not to be prefaced with this. (C#) or Me. (Visual Basic)
+dotnet_style_qualification_for_property = false:none
+
+#Style - assignment options
+#prefer compound assignment x += 1 rather than x = x + 1.
+dotnet_style_prefer_compound_assignment = true:silent
+
+#### General Code Quality Preferences ####
+
+# Warn about any performance category issues across the entire API
+dotnet_code_quality.Performance.api_surface = all:warning
+
+# CA1031: Do not catch general exception types
+dotnet_diagnostic.CA1031.severity = none
+
+# CA1034: Do not nest types
+dotnet_diagnostic.CA1034.severity = none
+
+
+# Features that require .NET Standard 2.1+
+
+# IDE0056: Use index operator
+dotnet_diagnostic.IDE0056.severity = none
+
+# IDE0057: Use range operator
+dotnet_diagnostic.IDE0057.severity = none
+
+# IDE0070: Use 'System.HashCode.Combine'
+dotnet_diagnostic.IDE0070.severity = none
\ No newline at end of file
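
For context (illustrative only, not part of the patch): IDE0056 and IDE0057 are disabled above because the index and range operators they suggest rely on System.Index/System.Range, which are only available when targeting .NET Standard 2.1 or newer. A minimal C# sketch of what those rules would ask for, with assumed values:

    string path = "Lucene.Net/Index/IndexWriter.cs";

    // What IDE0056/IDE0057 would suggest (requires System.Index/System.Range, i.e. netstandard2.1+):
    char last = path[^1];        // IDE0056: index-from-end operator
    string prefix = path[..10];  // IDE0057: range operator

    // Equivalent code that still compiles on older targets, which is why the rules are silenced here:
    char lastCompat = path[path.Length - 1];
    string prefixCompat = path.Substring(0, 10);
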
diff --git a/Directory.Build.props b/Directory.Build.props
index 4804162..8cd9ea7 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,4 +1,4 @@
-<!--
+<!--
 
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
@@ -23,9 +23,6 @@
 
   <PropertyGroup>
     <LangVersion>8.0</LangVersion>
-    <!-- Disabling warnings that require .NET Standard 2.1 -->
-    <NoWarn>$(NoWarn);IDE0056</NoWarn>
-    <NoWarn>$(NoWarn);IDE0057</NoWarn>
   </PropertyGroup>
   
   <!-- IMPORTANT: When these values are changed, the CI counter number should also be reset. -->
diff --git a/Lucene.Net.sln b/Lucene.Net.sln
index 24547c2..591bda9 100644
--- a/Lucene.Net.sln
+++ b/Lucene.Net.sln
@@ -63,6 +63,7 @@ EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{4DF0A2A1-B9C7-4EE5-BAF0-BEEF53E34220}"
 	ProjectSection(SolutionItems) = preProject
 		.asf.yaml = .asf.yaml
+		.editorconfig = .editorconfig
 		CHANGES.txt = CHANGES.txt
 		CONTRIBUTING.md = CONTRIBUTING.md
 		Directory.Build.props = Directory.Build.props
diff --git a/TestTargetFramework.props b/TestTargetFramework.props
index 57b4430..7347dd0 100644
--- a/TestTargetFramework.props
+++ b/TestTargetFramework.props
@@ -37,5 +37,26 @@
     <TargetFrameworks Condition=" '$(TestFrameworks)' == 'true' AND $([MSBuild]::IsOsPlatform('Windows')) ">$(TargetFrameworks);net48</TargetFrameworks>
     <TargetFramework Condition=" '$(TargetFrameworks)' != '' "></TargetFramework>
   </PropertyGroup>
+
+  <PropertyGroup Label="Warnings to be Disabled in Test Projects">
+    <NoWarn Label="Nested types should not be visible">$(NoWarn);CA1034</NoWarn>
+    <NoWarn Label="Use Literals Where Appropriate">$(NoWarn);CA1802</NoWarn>
+    <NoWarn Label="Add readonly modifier">$(NoWarn);CA1822</NoWarn>
+    <NoWarn Label="Avoid zero-length array allocations">$(NoWarn);CA1825</NoWarn>
+    <NoWarn Label="Do not raise exceptions in exception clauses">$(NoWarn);CA2219</NoWarn>
+
+    <NoWarn Label="Use object initializers">$(NoWarn);IDE0017</NoWarn>
+    <NoWarn Label="Use pattern matching">$(NoWarn);IDE0019;IDE0020;IDE0038</NoWarn>
+    <NoWarn Label="Use collection initializers">$(NoWarn);IDE0028</NoWarn>
+    <NoWarn Label="Use null propagation">$(NoWarn);IDE0031</NoWarn>
+    <NoWarn Label="Add accessibility modifiers">$(NoWarn);IDE0040</NoWarn>
+    <NoWarn Label="Add readonly modifier">$(NoWarn);IDE0044</NoWarn>
+    <NoWarn Label="Use language keywords">$(NoWarn);IDE0049</NoWarn>
+    <NoWarn Label="Remove unused private member">$(NoWarn);IDE0051</NoWarn>
+    <NoWarn Label="Remove unread private member">$(NoWarn);IDE0052</NoWarn>
+    <NoWarn Label="Remove unnecessary value assignment">$(NoWarn);IDE0059</NoWarn>
+    <NoWarn Label="Remove unused parameter">$(NoWarn);IDE0060</NoWarn>
+    <NoWarn Label="Naming rule violation">$(NoWarn);IDE1006</NoWarn>
+  </PropertyGroup>
   
 </Project>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs
index eceb7d1..cd5c835 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Ar
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
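For context (illustrative only, not part of the patch): Dictionary<TKey, TValue> does not override ToString(), so the old concatenation printed only the CLR type name instead of the offending parameters. Passing J2N.Text.StringFormatter.CurrentCulture as the IFormatProvider lets the collection contents be rendered in the message. A minimal sketch; the names and the exact rendering of the dictionary are assumptions:

    using System;
    using System.Collections.Generic;

    var args = new Dictionary<string, string> { ["bogusKey"] = "bogusValue" };

    // Old behavior: only the type name shows up in the exception message.
    Console.WriteLine("Unknown parameters: " + args);
    // -> Unknown parameters: System.Collections.Generic.Dictionary`2[System.String,System.String]

    // New behavior: the custom format provider renders the key/value pairs,
    // so the caller can see which parameters were unknown (approximate output).
    Console.WriteLine(string.Format(J2N.Text.StringFormatter.CurrentCulture,
        "Unknown parameters: {0}", args));
    // -> Unknown parameters: {bogusKey=bogusValue}
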
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs
index f7ff24d..ec2f331 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Ar
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs
index 4181103..28013b8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Ar
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs
index 78916e8..c2bc098 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Bg
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs
index 6b02802..2306f83 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Br
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
index 7e4af70..98f7eb7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
@@ -30648,24 +30648,22 @@ namespace Lucene.Net.Analysis.CharFilters
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
@@ -31068,12 +31066,12 @@ namespace Lucene.Net.Analysis.CharFilters
         private void YyReset(BufferedCharFilter reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
             zzEOFDone = false;
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs
index c44ae50..74dc924 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs
@@ -37,7 +37,7 @@ namespace Lucene.Net.Analysis.CharFilters
     public class HTMLStripCharFilterFactory : CharFilterFactory
     {
         private readonly ICollection<string> escapedTags;
-        private static readonly Regex TAG_NAME_PATTERN = new Regex(@"[^\\s,]+", RegexOptions.Compiled);
+        //private static readonly Regex TAG_NAME_PATTERN = new Regex(@"[^\\s,]+", RegexOptions.Compiled); // LUCENENET: Never read
 
         /// <summary>
         /// Creates a new <see cref="HTMLStripCharFilterFactory"/> </summary>
@@ -46,7 +46,7 @@ namespace Lucene.Net.Analysis.CharFilters
             escapedTags = GetSet(args, "escapedTags");
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
index 4d711bb..8cec3e4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.CharFilters
             mapping = Get(args, "mapping");
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
@@ -108,7 +108,7 @@ namespace Lucene.Net.Analysis.CharFilters
             }
         }
 
-        private char[] @out = new char[256];
+        private readonly char[] @out = new char[256]; // LUCENENET: marked readonly
 
         protected internal virtual string ParseString(string s)
         {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs
index ec2c82c..5938e41 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs
@@ -67,7 +67,7 @@ namespace Lucene.Net.Analysis.Cjk
             this.outputUnigrams = GetBoolean(args, "outputUnigrams", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs
index b286af1..2ef8524 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Analysis.Cjk
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs
index 8861020..ce12836 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Cjk
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs
index 18b6afa..0b86fc4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Ckb
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs
index 36fd085..f202647 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Ckb
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs
index 90d4a1a..17eee9a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Analysis.Cn
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs
index 454b428..f2c8f77 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs
@@ -37,7 +37,7 @@ namespace Lucene.Net.Analysis.Cn
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs
index 33dcd94..9720142 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Analysis.CommonGrams
             ignoreCase = GetBoolean(args, "ignoreCase", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs
index 1d0ca58..da1150d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs
@@ -55,7 +55,7 @@ namespace Lucene.Net.Analysis.Compound
             onlyLongestMatch = GetBoolean(args, "onlyLongestMatch", true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
index 6c3ed78..21d966a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
@@ -190,10 +190,8 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
 #endif
                 };
 
-            using (var reader = XmlReader.Create(new StreamReader(source, encoding), xmlReaderSettings))
-            {
-                LoadPatterns(reader);
-            }
+            using var reader = XmlReader.Create(new StreamReader(source, encoding), xmlReaderSettings);
+            LoadPatterns(reader);
         }
 
         /// <summary>
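
For context (illustrative only, not part of the patch): the C# 8 using declaration adopted above is equivalent to the using block it replaces, except that disposal happens when the enclosing scope (here, the method) ends rather than at an explicit closing brace. A minimal sketch with assumed names:

    // Block form (before): the reader is disposed at the closing brace.
    using (var reader = System.Xml.XmlReader.Create(stream))
    {
        LoadPatterns(reader);
    }

    // Using declaration (after): same disposal guarantee, less nesting;
    // the reader is disposed when the containing method returns.
    using var reader = System.Xml.XmlReader.Create(stream);
    LoadPatterns(reader);
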
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs
index fac4261..6e73b19 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs
@@ -91,10 +91,8 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
         public virtual void Parse(string path, Encoding encoding)
         {
             var xmlReaderSettings = GetXmlReaderSettings();
-            using (var src = XmlReader.Create(new StreamReader(new FileStream(path, FileMode.Open), encoding), xmlReaderSettings))
-            {
-                Parse(src);
-            }
+            using var src = XmlReader.Create(new StreamReader(new FileStream(path, FileMode.Open), encoding), xmlReaderSettings);
+            Parse(src);
         }
 
         /// <summary>
@@ -117,10 +115,8 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
         {
             var xmlReaderSettings = GetXmlReaderSettings();
 
-            using (var src = XmlReader.Create(new StreamReader(file.OpenRead(), encoding), xmlReaderSettings))
-            {
-                Parse(src);
-            }
+            using var src = XmlReader.Create(new StreamReader(file.OpenRead(), encoding), xmlReaderSettings);
+            Parse(src);
         }
 
         /// <summary>
@@ -138,10 +134,8 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
         {
             var xmlReaderSettings = GetXmlReaderSettings();
 
-            using (var src = XmlReader.Create(xmlStream, xmlReaderSettings))
-            {
-                Parse(src);
-            }
+            using var src = XmlReader.Create(xmlStream, xmlReaderSettings);
+            Parse(src);
         }
 
         /// <summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs
index 74b9dd3..116538a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs
@@ -79,7 +79,7 @@ namespace Lucene.Net.Analysis.Compound
             onlyLongestMatch = GetBoolean(args, "onlyLongestMatch", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs
index 3901254..36cc7c8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Core
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs
index 0bdc607..0f384e1 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Core
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs
index 7b8d27b..d51add8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Core
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs
index 7f89807..68b83b8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Core
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs
index afcfd2d..51361bc 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs
@@ -89,7 +89,7 @@ namespace Lucene.Net.Analysis.Core
             enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs
index 65d5b4e..8abe89a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Analysis.Core
             useWhitelist = GetBoolean(args, "useWhitelist", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs
index 1b6c52c..78ea8dd 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs
@@ -48,7 +48,7 @@ namespace Lucene.Net.Analysis.Core
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs
index f7b1e36..8e0815a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Core
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs
index a48129f..30cac93 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Cz
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs
index eb07f6b..1eb6162 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.De
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs
index 1a4cc5c..cae7f58 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.De
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs
index 2d7035a..ae43bde 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.De
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs
index 48d9b54..1be9a0f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.De
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs
index 9f57c99..cfe8612 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.El
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs
index 80eeefc..2c551c7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.El
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs
index 338ed1d..00a1bef 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.En
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs
index 3b28bdb..a9db7c9 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.En
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs
index fd38cc5..1aeba71 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.En
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs
index 1f51cf8..a50bc23 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.En
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs
index 036aad3..8f9daa3 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Es
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs
index e6bf9fa..d4d58fb 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Fa
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs
index d27938a..d427ea5 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Fa
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs
index 5e9503e..5aa3237 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Fi
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs
index a1e63cb..512771d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Fr
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs
index 4c2cede..36abdb8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Fr
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs
index b8e1896..a62eb1f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Ga
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs
index 5fe76a3..785c397 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Gl
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs
index c89b7f0..c4057f4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Gl
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
         public override TokenStream Create(TokenStream input)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs
index 474ac03..09ddfec 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Hi
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs
index 6d862ae..9da1681 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Hi
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs
index 652b178..2dceca6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Hu
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
index 0f4e984..b61ebd9 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
@@ -269,10 +269,11 @@ namespace Lucene.Net.Analysis.Hunspell
         {
             JCG.SortedDictionary<string, IList<char?>> prefixes = new JCG.SortedDictionary<string, IList<char?>>(StringComparer.Ordinal);
             JCG.SortedDictionary<string, IList<char?>> suffixes = new JCG.SortedDictionary<string, IList<char?>>(StringComparer.Ordinal);
-            IDictionary<string, int?> seenPatterns = new JCG.Dictionary<string, int?>();
-
-            // zero condition -> 0 ord
-            seenPatterns[".*"] = 0;
+            IDictionary<string, int?> seenPatterns = new JCG.Dictionary<string, int?>
+            {
+                // zero condition -> 0 ord
+                [".*"] = 0
+            };
             patterns.Add(null);
 
             // zero strip -> 0 ord
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
index 28890e9..7d9b4d3 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
@@ -71,7 +71,7 @@ namespace Lucene.Net.Analysis.Hunspell
             GetInt32(args, "recursionCap", 0);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs
index ac9b6f7..16b97f3 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.Id
             stemDerivational = GetBoolean(args, "stemDerivational", true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs
index 4672ec4..e98ac37 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.In
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs
index 16beaf7..d251dff 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.It
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs
index 003d37b..9408efb 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Lv
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs
index c5167da..7ae8788 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             preserveOriginal = GetBoolean(args, "preserveOriginal", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs
index e509e42..990b4ab 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs
@@ -108,7 +108,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             culture = GetCulture(args, CULTURE, null);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs
index 1bfee5e..7246c6c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             max = RequireInt32(args, MAX_KEY);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs
index 736dc81..8368e60 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs
index 10cc5f6..1145e96 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs
index f3dedb0..f99a1e9 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             ignoreCase = GetBoolean(args, "ignoreCase", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs
index 386c26e..6e8a4e1 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs
@@ -37,7 +37,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs
index 5d40b1d..c44abaa 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs
index b4e95ff..8eeb80b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             consumeAllTokens = GetBoolean(args, CONSUME_ALL_TOKENS_KEY, false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs
index d0b2a61..396add1 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             consumeAllTokens = GetBoolean(args, CONSUME_ALL_TOKENS_KEY, false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs
index 8e0d70f..3a62bdc 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs
@@ -432,6 +432,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         /// </summary>
         /// <param name="type"> Word type to check </param>
         /// <returns> <c>true</c> if the type contains <see cref="SUBWORD_DELIM"/>, <c>false</c> otherwise </returns>
+        [System.Diagnostics.CodeAnalysis.SuppressMessage("CodeQuality", "IDE0051:Remove unused private members", Justification = "Obsolete class, anyway")]
         private static bool IsSubwordDelim(int type)
         {
             return (type & SUBWORD_DELIM) != 0;
@@ -442,6 +443,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         /// </summary>
         /// <param name="type"> Word type to check </param>
         /// <returns> <c>true</c> if the type contains <see cref="UPPER"/>, <c>false</c> otherwise </returns>
+        [System.Diagnostics.CodeAnalysis.SuppressMessage("CodeQuality", "IDE0051:Remove unused private members", Justification = "Obsolete class, anyway")]
         private static bool IsUpper(int type)
         {
             return (type & UPPER) != 0;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs
index d4b7020..c56938e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs
@@ -78,7 +78,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
 
         public override sealed bool IncrementToken()
         {
-            Token nextToken = null;
+            Token nextToken; // LUCENENET: IDE0059: Remove unnecessary value assignment
             if (!prefixExhausted)
             {
                 nextToken = GetNextPrefixInputToken(reusableToken);
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs
index 2fbb279..46e2a12 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs
index 955c5d7..2476761 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs
index 189108d..90147a6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs
index 4f292db..b516622 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             ignoreCase = GetBoolean(args, "ignoreCase", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs
index 5928e47..c89c8f9 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs
@@ -76,8 +76,8 @@ namespace Lucene.Net.Analysis.Miscellaneous
             {
                 return true;
             }
-            int start = 0;
-            int end = 0;
+            int start; // LUCENENET: IDE0059: Remove unnecessary value assignment
+            int end; // LUCENENET: IDE0059: Remove unnecessary value assignment
             int endOff = 0;
 
             // eat the first characters
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs
index b772d2a..dd57e00 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             m_updateOffsets = GetBoolean(args, "updateOffsets", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
index f792a51..1588a1e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
@@ -99,7 +99,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             this.flags = flags;
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
@@ -138,7 +138,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         }
 
         // source => type
-        private static Regex typePattern = new Regex("(.*)\\s*=>\\s*(.*)\\s*$", RegexOptions.Compiled);
+        private static readonly Regex typePattern = new Regex("(.*)\\s*=>\\s*(.*)\\s*$", RegexOptions.Compiled);
 
         // parses a list of MappingCharFilter style rules into a custom byte[] type table
         private byte[] ParseTypes(IList<string> rules)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs
index 0a0a3ef..be656ee 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs
@@ -365,52 +365,6 @@ namespace Lucene.Net.Analysis.Miscellaneous
                     return WordDelimiterFilter.SUBWORD_DELIM;
 
             }
-
-            //switch (char.getType(ch))
-            //{
-            //  case char.UPPERCASE_LETTER:
-            //	  return WordDelimiterFilter.UPPER;
-            //  case char.LOWERCASE_LETTER:
-            //	  return WordDelimiterFilter.LOWER;
-
-            //  case char.TITLECASE_LETTER:
-            //  case char.MODIFIER_LETTER:
-            //  case char.OTHER_LETTER:
-            //  case char.NON_SPACING_MARK:
-            //  case char.ENCLOSING_MARK: // depends what it encloses?
-            //  case char.COMBINING_SPACING_MARK:
-            //	return WordDelimiterFilter.ALPHA;
-
-            //  case char.DECIMAL_DIGIT_NUMBER:
-            //  case char.LETTER_NUMBER:
-            //  case char.OTHER_NUMBER:
-            //	return WordDelimiterFilter.DIGIT;
-
-            //  // case Character.SPACE_SEPARATOR:
-            //  // case Character.LINE_SEPARATOR:
-            //  // case Character.PARAGRAPH_SEPARATOR:
-            //  // case Character.CONTROL:
-            //  // case Character.FORMAT:
-            //  // case Character.PRIVATE_USE:
-
-            //  case char.SURROGATE: // prevent splitting
-            //	return WordDelimiterFilter.ALPHA | WordDelimiterFilter.DIGIT;
-
-            //  // case Character.DASH_PUNCTUATION:
-            //  // case Character.START_PUNCTUATION:
-            //  // case Character.END_PUNCTUATION:
-            //  // case Character.CONNECTOR_PUNCTUATION:
-            //  // case Character.OTHER_PUNCTUATION:
-            //  // case Character.MATH_SYMBOL:
-            //  // case Character.CURRENCY_SYMBOL:
-            //  // case Character.MODIFIER_SYMBOL:
-            //  // case Character.OTHER_SYMBOL:
-            //  // case Character.INITIAL_QUOTE_PUNCTUATION:
-            //  // case Character.FINAL_QUOTE_PUNCTUATION:
-
-            //  default:
-            //	  return WordDelimiterFilter.SUBWORD_DELIM;
-            //}
         }
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs
index 64e90c5..bf991fe 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.NGram
             side = Get(args, "side", EdgeNGramTokenFilter.Side.FRONT.ToString());
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs
index 583bb61..c8941c6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.NGram
             side = Get(args, "side", EdgeNGramTokenFilter.Side.FRONT.ToString());
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs
index bea6b8e..f4197d5 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.NGram
             maxGramSize = GetInt32(args, "maxGramSize", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs
index 91f4356..118a909 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.Analysis.NGram
             maxGramSize = GetInt32(args, "maxGramSize", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs
index 8cbbc39..0c6782b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Analysis.No
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs
index f5603d6..2abfce7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Analysis.No
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs
index ee4b07e..312c210 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs
@@ -63,7 +63,7 @@ namespace Lucene.Net.Analysis.No
     /// </summary>
     public class NorwegianMinimalStemmer
     {
-        private readonly bool useBokmaal;
+        //private readonly bool useBokmaal; // LUCENENET: Never read
         private readonly bool useNynorsk;
 
         /// <summary>
@@ -76,7 +76,7 @@ namespace Lucene.Net.Analysis.No
             {
                 throw new ArgumentException("invalid flags");
             }
-            useBokmaal = (flags & NorwegianStandard.BOKMAAL) != 0;
+            //useBokmaal = (flags & NorwegianStandard.BOKMAAL) != 0; // LUCENENET: Never read
             useNynorsk = (flags & NorwegianStandard.NYNORSK) != 0;
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs
index 673f62c..6f1301a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs
@@ -85,7 +85,7 @@ namespace Lucene.Net.Analysis.Path
             skip = GetInt32(args, "skip", PathHierarchyTokenizer.DEFAULT_SKIP);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs
index bd88d62..e6ada46 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Analysis.Pattern
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs
index 8a9fa0c..a6b421f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Pattern
             replaceAll = "all".Equals(Get(args, "replace", new string[] { "all", "first" }, "all"), StringComparison.Ordinal);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs
index 8e6754c..63faf0a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs
@@ -77,7 +77,7 @@ namespace Lucene.Net.Analysis.Pattern
             m_group = GetInt32(args, GROUP, -1);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs
index 21ec2ff..7d32ad7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs
@@ -50,7 +50,7 @@ namespace Lucene.Net.Analysis.Payloads
             delimiter = GetChar(args, DELIMITER_ATTR, '|');
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs
index 6d7ff6f..90f9d60 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Analysis.Payloads
             typeMatch = Require(args, "typeMatch");
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs
index 692bff2..32c76d4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs
@@ -20,7 +20,7 @@
     /// <summary>
     /// Utility methods for encoding payloads.
     /// </summary>
-    public class PayloadHelper
+    public static class PayloadHelper // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         /// <summary>
         /// NOTE: This was encodeFloat() in Lucene
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs
index 0d0c6cd..75e5501 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Payloads
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs
index 247f817..4cdc6dc 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Payloads
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs
index 6d07152..8dc83bf 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs
@@ -48,7 +48,7 @@ namespace Lucene.Net.Analysis.Position
             positionIncrement = GetInt32(args, "positionIncrement", 0);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
             if (m_luceneMatchVersion.OnOrAfter(Lucene.Net.Util.LuceneVersion.LUCENE_44))
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs
index 05a1fe0..a838b3d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Pt
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs
index 9d58cc1..27c41f2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Pt
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs
index 8ec79b3..6079c64 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Pt
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs
index d4f59e9..e21da33 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Reverse
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs
index b1d9001..815c57d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Ru
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs
index 7f01c41..a2a6c7b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Ru
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs
index 67686ff..41f4d9b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Analysis.Shingle
             fillerToken = Get(args, "fillerToken", ShingleFilter.DEFAULT_FILLER_TOKEN);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
index 4320687..09d202b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
@@ -202,7 +202,7 @@ namespace Lucene.Net.Analysis.Sinks
             private readonly IList<AttributeSource.State> cachedStates = new List<AttributeSource.State>();
             private AttributeSource.State finalState;
             private IEnumerator<AttributeSource.State> it = null;
-            private SinkFilter filter;
+            private readonly SinkFilter filter; // LUCENENET: marked readonly
 
             internal SinkTokenStream(AttributeSource source, SinkFilter filter)
                 : base(source)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs
index eb9f1fd..3f5d654 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Analysis.Snowball
             wordFiles = Get(args, PROTECTED_TOKENS);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs
index ea36b5c..fb87766 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Standard
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs
index 2113f9e..a7cdb3c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Analysis.Standard
             maxTokenLength = GetInt32(args, "maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
index 0de05fd..4457b44 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
@@ -135,7 +135,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[50];
             int offset = 0;
-            offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -175,7 +176,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[50];
             int offset = 0;
-            offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -243,7 +245,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[552];
             int offset = 0;
-            offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -291,7 +294,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[50];
             int offset = 0;
-            offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -348,35 +352,33 @@ namespace Lucene.Net.Analysis.Standard
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>
-        /// number of newlines encountered up to the start of the matched text </summary>
-        private int yyline;
+        ///// <summary>
+        ///// number of newlines encountered up to the start of the matched text </summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>
         /// the number of characters up to the start of the matched text </summary>
         private int yyChar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the 
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the 
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>
         /// zzAtEOF == true &lt;=&gt; the scanner is at the EOF </summary>
         private bool zzAtEOF;
 
-        /// <summary>
-        /// denotes if the user-EOF-code has already been executed </summary>
-        private bool zzEOFDone;
+        ///// <summary>
+        ///// denotes if the user-EOF-code has already been executed </summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
 
@@ -526,12 +528,13 @@ namespace Lucene.Net.Analysis.Standard
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yyChar = yycolumn = 0;
+            //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read
+            yyChar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
             {
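
    The IDE0059 changes in this and the other generated scanners below keep the
    ZzUnpack* call but drop the assignment of its return value. That return value is the
    next write offset, which only matters when several packed segments are appended into
    one table (as in the chained ZZ_TRANS case further down); for a single segment, or
    for the last one in a chain, the assigned value is never read. A toy sketch of the
    pattern, with purely illustrative names:

        // toy version of the generated unpack pattern: the helper returns the next
        // write position so successive packed segments can be appended
        private static int UnpackSegment(int[] packed, int offset, int[] result)
        {
            System.Array.Copy(packed, 0, result, offset, packed.Length);
            return offset + packed.Length; // next write offset
        }

        private static int[] BuildTable(int[] seg0, int[] seg1)
        {
            int[] result = new int[seg0.Length + seg1.Length];
            int offset = 0;
            offset = UnpackSegment(seg0, offset, result);      // offset still needed here
            /*offset = */UnpackSegment(seg1, offset, result);  // last segment: value never read (IDE0059)
            return result;
        }
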
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs
index fe884a6..1a7a053 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Standard
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs
index db051bd..bd959c5 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.Standard
             maxTokenLength = GetInt32(args, "maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
index ca34d71..ea8acb6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
@@ -228,7 +228,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[197];
             int offset = 0;
-            offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -286,7 +287,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[197];
             int offset = 0;
-            offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -829,7 +831,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[26554];
             int offset = 0;
-            offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -877,7 +880,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[197];
             int offset = 0;
-            offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -934,35 +938,33 @@ namespace Lucene.Net.Analysis.Standard
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>
-        /// number of newlines encountered up to the start of the matched text </summary>
-        private int yyline;
+        ///// <summary>
+        ///// number of newlines encountered up to the start of the matched text </summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>
         /// the number of characters up to the start of the matched text </summary>
         private int yyChar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the 
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the 
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>
         /// zzAtEOF == true &lt;=&gt; the scanner is at the EOF </summary>
         private bool zzAtEOF;
 
-        /// <summary>
-        /// denotes if the user-EOF-code has already been executed </summary>
-        private bool zzEOFDone;
+        ///// <summary>
+        ///// denotes if the user-EOF-code has already been executed </summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>
@@ -1123,12 +1125,13 @@ namespace Lucene.Net.Analysis.Standard
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yyChar = yycolumn = 0;
+            //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read
+            yyChar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
index 0750d6f..762355f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
@@ -676,30 +676,27 @@ namespace Lucene.Net.Analysis.Standard.Std31
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yyChar;
 
-#pragma warning disable 169, 414
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the 
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the 
-        /// matched text
-        /// </summary>
-        private int yycolumn;
-
-        /// <summary>zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line</summary>
-        private bool zzAtBOL = true;
+        ///// <summary>zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line</summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning disable 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -847,12 +844,13 @@ namespace Lucene.Net.Analysis.Standard.Std31
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yyChar = yycolumn = 0;
+            //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read
+            yyChar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
index 20996a5..fa729ee 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
@@ -3231,30 +3231,27 @@ namespace Lucene.Net.Analysis.Standard.Std31
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the 
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the 
-        /// matched text
-        /// </summary>
-        private int yycolumn;
-
-        /// <summary>zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line</summary>
-        private bool zzAtBOL = true;
+        ///// <summary>zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line</summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -3407,12 +3404,13 @@ namespace Lucene.Net.Analysis.Standard.Std31
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
+            yychar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
index 3308df0..b00c698 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
@@ -692,32 +692,29 @@ namespace Lucene.Net.Analysis.Standard.Std34
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yyChar;
 
-#pragma warning disable 169, 414
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the 
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the 
-        /// matched text
-        /// </summary>
-        private int yycolumn;
-
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -868,12 +865,13 @@ namespace Lucene.Net.Analysis.Standard.Std34
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yyChar = yycolumn = 0;
+            //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read
+            yyChar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
index ea9210f..aed0257 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
@@ -3334,31 +3334,29 @@ namespace Lucene.Net.Analysis.Standard.Std34
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -3515,12 +3513,13 @@ namespace Lucene.Net.Analysis.Standard.Std34
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
+            yychar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
index 5d89906..e3acd5c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
@@ -3769,31 +3769,29 @@ namespace Lucene.Net.Analysis.Standard.Std36
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the 
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the 
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -3950,12 +3948,13 @@ namespace Lucene.Net.Analysis.Standard.Std36
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
+            yychar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
index 7fe20fb..432f8fa 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
@@ -799,31 +799,29 @@ namespace Lucene.Net.Analysis.Standard.Std40
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yyChar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -978,12 +976,13 @@ namespace Lucene.Net.Analysis.Standard.Std40
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yyChar = yycolumn = 0;
+            //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read
+            yyChar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
index 7a2cbc0..19e3346 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
@@ -3981,31 +3981,29 @@ namespace Lucene.Net.Analysis.Standard.Std40
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -4166,12 +4164,13 @@ namespace Lucene.Net.Analysis.Standard.Std40
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
+            yychar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs
index 0a5a34c..497c731 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.Standard
             maxTokenLength = GetInt32(args, "maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
index 9d5089a..d2565c4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
@@ -273,7 +273,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[3116];
             int offset = 0;
-            offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -693,7 +694,8 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[3116];
             int offset = 0;
-            offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
+            /*offset = */
+            ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -8966,7 +8968,7 @@ namespace Lucene.Net.Analysis.Standard
             int offset = 0;
             offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
             offset = ZzUnpackTrans(ZZ_TRANS_PACKED_1, offset, result);
-            offset = ZzUnpackTrans(ZZ_TRANS_PACKED_2, offset, result);
+            /*offset = */ZzUnpackTrans(ZZ_TRANS_PACKED_2, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -9038,7 +9040,7 @@ namespace Lucene.Net.Analysis.Standard
         {
             int[] result = new int[3116];
             int offset = 0;
-            offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
+            /*offset = */ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment
             return result;
         }
 
@@ -9086,31 +9088,29 @@ namespace Lucene.Net.Analysis.Standard
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline; // LUCENENET: Never read
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning restore 169, 414
 
         /* user code: */
         /// <summary>Alphanumeric sequences</summary>
@@ -9271,12 +9271,13 @@ namespace Lucene.Net.Analysis.Standard
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
+            yychar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs
index ea6ffa7..49c2e0f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.Sv
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
index 21ec6d8..1b05100 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Analysis.Synonym
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
@@ -151,9 +151,9 @@ namespace Lucene.Net.Analysis.Synonym
             {
                 TokenizerFactory tokFactory = (TokenizerFactory)Activator.CreateInstance(clazz, new object[] { tokArgs });
 
-                if (tokFactory is IResourceLoaderAware)
+                if (tokFactory is IResourceLoaderAware resourceLoaderAware)
                 {
-                    ((IResourceLoaderAware)tokFactory).Inform(loader);
+                    resourceLoaderAware.Inform(loader);
                 }
                 return tokFactory;
             }
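
    On the IDE0020/IDE0038 pattern-matching changes such as the one above: the is-check
    followed by a cast repeats the runtime type check, while a declaration pattern tests
    once and binds a typed variable in the same expression. A minimal sketch (the
    tokFactory and loader variables are taken from the surrounding code for illustration):

        // before: type test, then a second cast back to the same type
        if (tokFactory is IResourceLoaderAware)
        {
            ((IResourceLoaderAware)tokFactory).Inform(loader);
        }

        // after: one test, with the result bound to a scoped variable
        if (tokFactory is IResourceLoaderAware resourceLoaderAware)
        {
            resourceLoaderAware.Inform(loader);
        }
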
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs
index aaafa9a..4538724 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs
@@ -74,7 +74,7 @@ namespace Lucene.Net.Analysis.Synonym
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
@@ -150,8 +150,10 @@ namespace Lucene.Net.Analysis.Synonym
                     else
                     {
                         // reduce to first argument
-                        target = new List<IList<string>>(1);
-                        target.Add(source[0]);
+                        target = new List<IList<string>>(1)
+                        {
+                            source[0]
+                        };
                     }
                 }
 
@@ -212,9 +214,9 @@ namespace Lucene.Net.Analysis.Synonym
             {
                 TokenizerFactory tokFactory = (TokenizerFactory)Activator.CreateInstance(clazz, new object[] { tokArgs });
 
-                if (tokFactory is IResourceLoaderAware)
+                if (tokFactory is IResourceLoaderAware resourceLoaderAware)
                 {
-                    ((IResourceLoaderAware)tokFactory).Inform(loader);
+                    resourceLoaderAware.Inform(loader);
                 }
                 return tokFactory;
             }
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs
index 8b345b4..3eb0e5e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Th
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs
index 84d660e..8e88f5b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Th
             AssureMatchVersion();
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs
index a3cd1f6..30e9c77 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Tr
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs
index 0384dca..d363c5c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs
@@ -115,8 +115,7 @@ namespace Lucene.Net.Analysis.Util
 
         public Type LookupClass(string name)
         {
-            Type service;
-            if (this.services.TryGetValue(name.ToLowerInvariant(), out service))
+            if (this.services.TryGetValue(name.ToLowerInvariant(), out Type service))
             {
                 return service;
             }
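
    The IDE0018 change above moves the out variable declaration to the point of use, so
    there is no separately declared, possibly-unassigned local. Sketch of the same
    pattern with illustrative names:

        // before
        Type service;
        if (services.TryGetValue(key, out service))
            return service;

        // after
        if (services.TryGetValue(key, out Type service))
            return service;
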
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs
index e96223c..0071147 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs
@@ -356,19 +356,6 @@ namespace Lucene.Net.Analysis.Util
         }
 
         /// <summary>
-        /// Peeks at the next input character, refilling the buffer if necessary. If
-        /// this character is a newline character ("\n"), it is discarded.
-        /// </summary>
-        private void ChompNewline()
-        {
-            if ((pos != end || FillBuf() != -1)
-                && buf[pos] == '\n')
-            {
-                pos++;
-            }
-        }
-
-        /// <summary>
         /// Returns the next line of text available from this reader. A line is
         /// represented by zero or more characters followed by <c>'\n'</c>,
         /// <c>'\r'</c>, <c>"\r\n"</c> or the end of the reader. The string does
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
index b2dd01a..4838472 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Analysis.Util
         /// </summary>
         internal class MapValue
         {
-            private TValue value = default(TValue);
+            private TValue value = default;
             public TValue Value
             {
                 get => value;
@@ -215,12 +215,10 @@ namespace Lucene.Net.Analysis.Util
         /// <param name="arrayIndex">A 32-bit integer that represents the index in <paramref name="array"/> at which copying begins.</param>
         public virtual void CopyTo(KeyValuePair<string, TValue>[] array, int arrayIndex)
         {
-            using (var iter = (EntryIterator)EntrySet().GetEnumerator())
+            using var iter = (EntryIterator)EntrySet().GetEnumerator();
+            for (int i = arrayIndex; iter.MoveNext(); i++)
             {
-                for (int i = arrayIndex; iter.MoveNext(); i++)
-                {
-                    array[i] = new KeyValuePair<string, TValue>(iter.Current.Key, iter.CurrentValue);
-                }
+                array[i] = new KeyValuePair<string, TValue>(iter.Current.Key, iter.CurrentValue);
             }
         }
 
@@ -231,12 +229,10 @@ namespace Lucene.Net.Analysis.Util
         /// <param name="map"></param>
         public virtual void CopyTo(CharArrayMap<TValue> map)
         {
-            using (var iter = (EntryIterator)EntrySet().GetEnumerator())
+            using var iter = (EntryIterator)EntrySet().GetEnumerator();
+            while (iter.MoveNext())
             {
-                while (iter.MoveNext())
-                {
-                    map.Put(iter.Current.Key, iter.CurrentValue);
-                }
+                map.Put(iter.Current.Key, iter.CurrentValue);
             }
         }
 
@@ -286,7 +282,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (o == null)
             {
-                throw new ArgumentException("o can't be null", "o");
+                throw new ArgumentException("o can't be null", nameof(o));
             }
 
             var c = o as char[];
@@ -305,7 +301,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Get(char[] text, int offset, int length)
         {
             var value = values[GetSlot(text, offset, length)];
-            return (value != null) ? value.Value : default(TValue);
+            return (value != null) ? value.Value : default;
         }
 
         /// <summary>
@@ -314,7 +310,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Get(char[] text)
         {
             var value = values[GetSlot(text, 0, text.Length)];
-            return (value != null) ? value.Value : default(TValue);
+            return (value != null) ? value.Value : default;
         }
 
         /// <summary>
@@ -323,7 +319,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Get(ICharSequence text)
         {
             var value = values[GetSlot(text)];
-            return (value != null) ? value.Value : default(TValue);
+            return (value != null) ? value.Value : default;
         }
 
         /// <summary>
@@ -332,7 +328,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Get(string text)
         {
             var value = values[GetSlot(text)];
-            return (value != null) ? value.Value : default(TValue);
+            return (value != null) ? value.Value : default;
         }
 
         /// <summary>
@@ -417,7 +413,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Put(ICharSequence text, TValue value)
         {
             MapValue oldValue = PutImpl(text, new MapValue(value)); // could be more efficient
-            return (oldValue != null) ? oldValue.Value : default(TValue);
+            return (oldValue != null) ? oldValue.Value : default;
         }
 
         /// <summary>
@@ -427,7 +423,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Put(object o, TValue value)
         {
             MapValue oldValue = PutImpl(o, new MapValue(value));
-            return (oldValue != null) ? oldValue.Value : default(TValue);
+            return (oldValue != null) ? oldValue.Value : default;
         }
 
         /// <summary>
@@ -436,7 +432,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Put(string text, TValue value)
         {
             MapValue oldValue = PutImpl(text, new MapValue(value));
-            return (oldValue != null) ? oldValue.Value : default(TValue);
+            return (oldValue != null) ? oldValue.Value : default;
         }
 
         /// <summary>
@@ -447,7 +443,7 @@ namespace Lucene.Net.Analysis.Util
         public virtual TValue Put(char[] text, TValue value)
         {
             MapValue oldValue = PutImpl(text, new MapValue(value));
-            return (oldValue != null) ? oldValue.Value : default(TValue);
+            return (oldValue != null) ? oldValue.Value : default;
         }
 
         /// <summary>
@@ -802,7 +798,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (text == null)
             {
-                throw new ArgumentException("text can't be null", "text");
+                throw new ArgumentException("text can't be null", nameof(text));
             }
             int code = 0;
             int stop = offset + length;
@@ -829,7 +825,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (text == null)
             {
-                throw new ArgumentException("text can't be null", "text");
+                throw new ArgumentException("text can't be null", nameof(text));
             }
 
             int code = 0;
@@ -857,7 +853,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (text == null)
             {
-                throw new ArgumentException("text can't be null", "text");
+                throw new ArgumentException("text can't be null", nameof(text));
             }
 
             int code = 0;
@@ -943,7 +939,7 @@ namespace Lucene.Net.Analysis.Util
                 value = val.Value;
                 return true;
             }
-            value = default(TValue);
+            value = default;
             return false;
         }
 
@@ -963,7 +959,7 @@ namespace Lucene.Net.Analysis.Util
                 value = val.Value;
                 return true;
             }
-            value = default(TValue);
+            value = default;
             return false;
         }
 
@@ -983,7 +979,7 @@ namespace Lucene.Net.Analysis.Util
                 value = val.Value;
                 return true;
             }
-            value = default(TValue);
+            value = default;
             return false;
         }
 
@@ -1003,7 +999,7 @@ namespace Lucene.Net.Analysis.Util
                 value = val.Value;
                 return true;
             }
-            value = default(TValue);
+            value = default;
             return false;
         }
 
@@ -1144,12 +1140,10 @@ namespace Lucene.Net.Analysis.Util
 
             public void CopyTo(string[] array, int arrayIndex)
             {
-                using (var iter = GetEnumerator())
+                using var iter = GetEnumerator();
+                for (int i = arrayIndex; iter.MoveNext(); i++)
                 {
-                    for (int i = arrayIndex; iter.MoveNext(); i++)
-                    {
-                        array[i] = iter.Current;
-                    }
+                    array[i] = iter.Current;
                 }
             }
 
@@ -1260,25 +1254,23 @@ namespace Lucene.Net.Analysis.Util
 
             public override string ToString()
             {
-                using (var i = (ValueEnumerator)GetEnumerator())
-                {
-                    if (!i.HasNext)
-                        return "[]";
+                using var i = (ValueEnumerator)GetEnumerator();
+                if (!i.HasNext)
+                    return "[]";
 
-                    StringBuilder sb = new StringBuilder();
-                    sb.Append('[');
-                    while (i.MoveNext())
+                StringBuilder sb = new StringBuilder();
+                sb.Append('[');
+                while (i.MoveNext())
+                {
+                    TValue value = i.Current;
+                    if (sb.Length > 1)
                     {
-                        TValue value = i.Current;
-                        if (sb.Length > 1)
-                        {
-                            sb.Append(',').Append(' ');
-                        }
-                        sb.Append(value.ToString());
+                        sb.Append(',').Append(' ');
                     }
-
-                    return sb.Append(']').ToString();
+                    sb.Append(value.ToString());
                 }
+
+                return sb.Append(']').ToString();
             }
 
             /// <summary>
@@ -1520,7 +1512,7 @@ namespace Lucene.Net.Analysis.Util
                 get
                 {
                     var val = outerInstance.values[lastPos];
-                    return val != null ? val.Value : default(TValue);
+                    return val != null ? val.Value : default;
                 }
             }
 
@@ -1572,7 +1564,7 @@ namespace Lucene.Net.Analysis.Util
                     var val = outerInstance.values[lastPos];
                     return new KeyValuePair<string, TValue>(
                         new string(outerInstance.keys[lastPos]), 
-                        val != null ? val.Value : default(TValue));
+                        val != null ? val.Value : default);
                 }
             }
 
@@ -1735,12 +1727,8 @@ namespace Lucene.Net.Analysis.Util
         bool Put(string text);
     }
 
-    public class CharArrayMap
+    public static class CharArrayMap // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
-        // Prevent direct creation
-        private CharArrayMap()
-        { }
-
         /// <summary>
         /// Returns a copy of the given map as a <see cref="CharArrayMap{TValue}"/>. If the given map
         /// is a <see cref="CharArrayMap{TValue}"/> the ignoreCase property will be preserved.
@@ -1805,7 +1793,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (map == null)
             {
-                throw new ArgumentException("Given map is null", "map");
+                throw new ArgumentException("Given map is null", nameof(map));
             }
             if (map == CharArrayMap<TValue>.EmptyMap() || map.Count == 0)
             {
@@ -1826,7 +1814,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (map == null)
             {
-                throw new ArgumentException("Given map is null", "map");
+                throw new ArgumentException("Given map is null", nameof(map));
             }
             if (map == CharArrayMap<TValue>.EmptyMap() || map.Count == 0)
             {
@@ -1971,7 +1959,7 @@ namespace Lucene.Net.Analysis.Util
             {
                 if (text == null)
                 {
-                    throw new ArgumentNullException("text");
+                    throw new ArgumentNullException(nameof(text));
                 }
                 return false;
             }
@@ -1980,7 +1968,7 @@ namespace Lucene.Net.Analysis.Util
             {
                 if (text == null)
                 {
-                    throw new ArgumentNullException("text");
+                    throw new ArgumentNullException(nameof(text));
                 }
                 return false;
             }
@@ -1989,7 +1977,7 @@ namespace Lucene.Net.Analysis.Util
             {
                 if (text == null)
                 {
-                    throw new ArgumentNullException("text");
+                    throw new ArgumentNullException(nameof(text));
                 }
                 return false;
             }
@@ -1998,7 +1986,7 @@ namespace Lucene.Net.Analysis.Util
             {
                 if (o == null)
                 {
-                    throw new ArgumentNullException("o");
+                    throw new ArgumentNullException(nameof(o));
                 }
                 return false;
             }
@@ -2007,36 +1995,36 @@ namespace Lucene.Net.Analysis.Util
             {
                 if (text == null)
                 {
-                    throw new ArgumentNullException("text");
+                    throw new ArgumentNullException(nameof(text));
                 }
-                return default(V);
+                return default;
             }
 
             public override V Get(char[] text)
             {
                 if (text == null)
                 {
-                    throw new ArgumentNullException("text");
+                    throw new ArgumentNullException(nameof(text));
                 }
-                return default(V);
+                return default;
             }
 
             public override V Get(ICharSequence text)
             {
                 if (text == null)
                 {
-                    throw new ArgumentNullException("text");
+                    throw new ArgumentNullException(nameof(text));
                 }
-                return default(V);
+                return default;
             }
 
             public override V Get(object o)
             {
                 if (o == null)
                 {
-                    throw new ArgumentNullException("o");
+                    throw new ArgumentNullException(nameof(o));
                 }
-                return default(V);
+                return default;
             }
         }
     }
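
A quick illustration of the two mechanical changes repeated throughout the file above: nameof in place of hard-coded parameter-name strings (CA1507) and the target-typed default literal in place of default(TValue). This is a standalone sketch, not part of the patch; the type and member names are made up.

    using System;

    public class Cache<TValue>
    {
        private readonly TValue stored = default;

        public TValue GetOrDefault(string key)
        {
            if (key == null)
            {
                // nameof(key) stays in sync if the parameter is ever renamed (CA1507)
                throw new ArgumentNullException(nameof(key));
            }
            // target-typed 'default' replaces the redundant default(TValue)
            return key.Length == 0 ? default : stored;
        }
    }
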
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs
index 3036aa8..465b953 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs
@@ -412,7 +412,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (IsReadOnly)
             {
@@ -439,7 +439,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (IsReadOnly)
             {
@@ -465,7 +465,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (IsReadOnly)
             {
@@ -487,7 +487,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (IsReadOnly)
             {
@@ -559,7 +559,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (this.Count == 0)
             {
@@ -577,8 +577,7 @@ namespace Lucene.Net.Analysis.Util
             // we just need to return true if the other set
             // contains all of the elements of the this set,
             // but we need to use the comparison rules of the current set.
-            int foundCount, unfoundCount;
-            this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount);
+            this.GetFoundAndUnfoundCounts(other, out int foundCount, out int _);
             return foundCount == this.Count;
         }
 
@@ -591,7 +590,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (this.Count == 0)
             {
@@ -600,8 +599,7 @@ namespace Lucene.Net.Analysis.Util
             // we just need to return true if the other set
             // contains all of the elements of the this set,
             // but we need to use the comparison rules of the current set.
-            int foundCount, unfoundCount;
-            this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount);
+            this.GetFoundAndUnfoundCounts(other, out int foundCount, out int _);
             return foundCount == this.Count;
         }
 
@@ -614,7 +612,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             ICollection<string> is2 = other as ICollection<string>;
             if (is2 != null)
@@ -641,7 +639,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             ICollection<T> is2 = other as ICollection<T>;
             if (is2 != null && is2.Count == 0)
@@ -660,7 +658,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             ICollection<string> is2 = other as ICollection<string>;
             if (is2 != null)
@@ -682,8 +680,7 @@ namespace Lucene.Net.Analysis.Util
             // we just need to return true if the other set
             // contains all of the elements of the this set plus at least one more,
             // but we need to use the comparison rules of the current set.
-            int foundCount, unfoundCount;
-            this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount);
+            this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount);
             return foundCount == this.Count && unfoundCount > 0;
         }
 
@@ -696,7 +693,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             ICollection<T> is2 = other as ICollection<T>;
             if (is2 != null && this.Count == 0)
@@ -706,8 +703,7 @@ namespace Lucene.Net.Analysis.Util
             // we just need to return true if the other set
             // contains all of the elements of the this set plus at least one more,
             // but we need to use the comparison rules of the current set.
-            int foundCount, unfoundCount;
-            this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount);
+            this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount);
             return foundCount == this.Count && unfoundCount > 0;
         }
 
@@ -720,7 +716,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (this.Count == 0)
             {
@@ -743,8 +739,7 @@ namespace Lucene.Net.Analysis.Util
                     return this.ContainsAll(set);
                 }
             }
-            int foundCount, unfoundCount;
-            this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount);
+            this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount);
             return foundCount < this.Count && unfoundCount == 0;
         }
 
@@ -757,7 +752,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (this.Count == 0)
             {
@@ -768,8 +763,7 @@ namespace Lucene.Net.Analysis.Util
             {
                 return true;
             }
-            int foundCount, unfoundCount;
-            this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount);
+            this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount);
             return foundCount < this.Count && unfoundCount == 0;
         }
 
@@ -782,7 +776,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (this.Count != 0)
             {
@@ -806,7 +800,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (this.Count != 0)
             {
@@ -1144,7 +1138,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1172,7 +1166,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1200,7 +1194,7 @@ namespace Lucene.Net.Analysis.Util
         //{
         //    if (other == null)
         //    {
-        //        throw new ArgumentNullException("other");
+        //        throw new ArgumentNullException(nameof(other));
         //    }
         //    if (set.IsReadOnly)
         //    {
@@ -1228,7 +1222,7 @@ namespace Lucene.Net.Analysis.Util
         //{
         //    if (other == null)
         //    {
-        //        throw new ArgumentNullException("other");
+        //        throw new ArgumentNullException(nameof(other));
         //    }
         //    if (set.IsReadOnly)
         //    {
@@ -1256,7 +1250,7 @@ namespace Lucene.Net.Analysis.Util
         //{
         //    if (other == null)
         //    {
-        //        throw new ArgumentNullException("other");
+        //        throw new ArgumentNullException(nameof(other));
         //    }
         //    if (set.IsReadOnly)
         //    {
@@ -1284,7 +1278,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1312,7 +1306,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1341,7 +1335,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1369,7 +1363,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1398,7 +1392,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1427,7 +1421,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
@@ -1456,7 +1450,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (other == null)
             {
-                throw new ArgumentNullException("other");
+                throw new ArgumentNullException(nameof(other));
             }
             if (set.IsReadOnly)
             {
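
The out-parameter rewrites above all apply IDE0018 (inline variable declaration): the variable is declared at the call site, and a discard takes the place of results that are never read. A minimal sketch of the same shape, using Dictionary.TryGetValue as a stand-in for GetFoundAndUnfoundCounts:

    using System;
    using System.Collections.Generic;

    public static class OutVariableDemo
    {
        public static void Main()
        {
            var counts = new Dictionary<string, int> { ["lucene"] = 1 };

            // Before (IDE0018): int value; counts.TryGetValue("lucene", out value);
            // After: declare the out variable inline at the call site
            if (counts.TryGetValue("lucene", out int value))
                Console.WriteLine(value);

            // When the result is never read, a typed discard documents that intent
            counts.TryGetValue("missing", out int _);
        }
    }
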
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs
index ee67812..2f76cad 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Analysis.Util
             ignoreCase = GetBoolean(args, "ignoreCase", false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
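
The message change above (repeated in every factory touched below) fixes a real reporting bug: concatenating an IDictionary<string, string> onto a string only renders the dictionary's type name, so the offending parameters never showed up in the exception text. string.Format with J2N's StringFormatter renders the collection contents instead. A rough sketch of the difference, assuming the J2N package already referenced by Lucene.NET (argument values are made up):

    using System;
    using System.Collections.Generic;

    public static class FormatterDemo
    {
        public static void Main()
        {
            var args = new Dictionary<string, string> { ["bogus"] = "true" };

            // Prints something like "Unknown parameters: System.Collections.Generic.Dictionary`2[...]"
            Console.WriteLine("Unknown parameters: " + args);

            // Prints the actual contents, e.g. "Unknown parameters: {bogus=true}"
            Console.WriteLine(string.Format(J2N.Text.StringFormatter.CurrentCulture,
                "Unknown parameters: {0}", args));
        }
    }
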
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs
index 396cc00..259365c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs
@@ -68,7 +68,7 @@ namespace Lucene.Net.Analysis.Util
         /// be provided to this constructor.
         /// </para>
         /// </summary>
-        public SegmentingTokenizerBase(TextReader reader, BreakIterator iterator)
+        protected SegmentingTokenizerBase(TextReader reader, BreakIterator iterator) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
             : this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, reader, iterator)
         {
         }
@@ -76,7 +76,7 @@ namespace Lucene.Net.Analysis.Util
         /// <summary>
         /// Construct a new SegmenterBase, also supplying the <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory"/>
         /// </summary>
-        public SegmentingTokenizerBase(AttributeFactory factory, TextReader reader, BreakIterator iterator)
+        protected SegmentingTokenizerBase(AttributeFactory factory, TextReader reader, BreakIterator iterator) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
             : base(factory, reader)
         {
             offsetAtt = AddAttribute<IOffsetAttribute>();
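
CA1012 asks that constructors on abstract types be protected rather than public: the type can only ever be instantiated through a derived class, so a public constructor is a misleading part of the API surface. Reduced to a sketch with hypothetical names:

    public abstract class TokenizerBase
    {
        // protected: only derived classes can call this (CA1012)
        protected TokenizerBase(string name) => Name = name;

        public string Name { get; }
    }

    public sealed class SimpleTokenizer : TokenizerBase
    {
        public SimpleTokenizer() : base("simple") { }
    }
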
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs
index a6f0ebc..a8e5d63 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs
@@ -26,15 +26,9 @@ namespace Lucene.Net.Analysis.Util
     /// 
     /// @lucene.internal
     /// </summary>
-    public class StemmerUtil
+    public static class StemmerUtil // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         /// <summary>
-        /// no instance </summary>
-        private StemmerUtil()
-        {
-        }
-
-        /// <summary>
         /// Returns true if the character array starts with the prefix.
         /// </summary>
         /// <param name="s"> Input Buffer </param>
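
StemmerUtil, like WordlistLoader and the non-generic CharArrayMap above, carries only static members, so CA1052 wants the type itself declared static instead of hiding a private constructor. A minimal sketch of the pattern with a hypothetical helper:

    public static class BufferUtil // static type: cannot be instantiated or subclassed (CA1052)
    {
        public static bool StartsWith(char[] s, int len, string prefix)
        {
            if (prefix.Length > len) return false;
            for (int i = 0; i < prefix.Length; i++)
            {
                if (s[i] != prefix[i]) return false;
            }
            return true;
        }
    }
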
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
index f49a375..27230cd 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
@@ -31,19 +31,13 @@ namespace Lucene.Net.Analysis.Util
     /// <see cref="IOUtils"/> to obtain <see cref="TextReader"/> instances.
     /// @lucene.internal
     /// </summary>
-    public class WordlistLoader
+    public static class WordlistLoader // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         private const int INITIAL_CAPACITY = 16;
 
         // LUCENENET specific
         private readonly static Regex WHITESPACE = new Regex("\\s+", RegexOptions.Compiled);
 
-        /// <summary>
-        /// no instance </summary>
-        private WordlistLoader()
-        {
-        }
-
         // LUCENENET TODO: Add .NET overloads that accept a file name? Or at least a FileInfo object as was done in 3.0.3?
 
         /// <summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs
index 21a1705..2e54313 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.Wikipedia
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
index fdcf8e7..ac043c6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
@@ -421,31 +421,29 @@ namespace Lucene.Net.Analysis.Wikipedia
         /// </summary>
         private int zzEndRead;
 
-        /// <summary>number of newlines encountered up to the start of the matched text</summary>
-        private int yyline;
+        ///// <summary>number of newlines encountered up to the start of the matched text</summary>
+        //private int yyline;
 
         /// <summary>the number of characters up to the start of the matched text</summary>
         private int yychar;
 
-#pragma warning disable 169, 414
-        /// <summary>
-        /// the number of characters from the last newline up to the start of the
-        /// matched text
-        /// </summary>
-        private int yycolumn;
+        ///// <summary>
+        ///// the number of characters from the last newline up to the start of the
+        ///// matched text
+        ///// </summary>
+        //private int yycolumn; // LUCENENET: Never read
 
-        /// <summary>
-        /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
-        /// </summary>
-        private bool zzAtBOL = true;
+        ///// <summary>
+        ///// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line
+        ///// </summary>
+        //private bool zzAtBOL = true; // LUCENENET: Never read
 
         /// <summary>zzAtEOF == true &lt;=&gt; the scanner is at the EOF</summary>
         private bool zzAtEOF;
 
-        /// <summary>denotes if the user-EOF-code has already been executed</summary>
-        private bool zzEOFDone;
+        ///// <summary>denotes if the user-EOF-code has already been executed</summary>
+        //private bool zzEOFDone; // LUCENENET: Never read
 
-#pragma warning disable 169, 414
 
         /* user code: */
 
@@ -633,12 +631,13 @@ namespace Lucene.Net.Analysis.Wikipedia
         public void YyReset(TextReader reader)
         {
             zzReader = reader;
-            zzAtBOL = true;
+            //zzAtBOL = true; // LUCENENET: Never read
             zzAtEOF = false;
-            zzEOFDone = false;
+            //zzEOFDone = false; // LUCENENET: Never read
             zzEndRead = zzStartRead = 0;
             zzCurrentPos = zzMarkedPos = 0;
-            yyline = yychar = yycolumn = 0;
+            //yyline = yychar = yycolumn = 0; // LUCENENET: Never read
+            yychar = 0;
             zzLexicalState = YYINITIAL;
             if (zzBuffer.Length > ZZ_BUFFERSIZE)
                 zzBuffer = new char[ZZ_BUFFERSIZE];
diff --git a/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs
index 1f4391d..081aaa9 100644
--- a/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs
@@ -103,7 +103,7 @@ namespace Lucene.Net.Collation
 
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs
index d636a2f..3e2bf8c 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs
@@ -102,7 +102,7 @@ namespace Lucene.Net.Tartarus.Snowball
         private readonly MethodInfo method;
 
         /// <summary>object to invoke method on</summary>
-        public SnowballProgram MethodObject => MethodObject;
+        public SnowballProgram MethodObject => methodobject;
 
         private readonly SnowballProgram methodobject;
     }
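
The one-token change above fixes a genuine bug: the expression-bodied property referred to itself, so any read of MethodObject recursed until the stack overflowed. Returning the lowercase backing field restores the intended accessor. Reduced to its essence (class and member names are hypothetical):

    public class Holder
    {
        private readonly object backing = new object();

        // Bug: the property evaluates itself -> StackOverflowException on first read
        //public object Value => Value;

        // Fix: return the backing field
        public object Value => backing;
    }
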
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs
index 1587751..8b444bf 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs
@@ -43,9 +43,9 @@ namespace Lucene.Net.Analysis.Icu
         public ICUFoldingFilterFactory(IDictionary<string, string> args)
             : base(args)
         {
-            if (args.Count != 0)
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
index 49ff4a3..93018bb 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
@@ -63,22 +63,14 @@ namespace Lucene.Net.Analysis.Icu
         public ICUNormalizer2CharFilter(TextReader input, Normalizer2 normalizer)
             : this(input, normalizer, 128)
         {
-            if (normalizer == null)
-            {
-                throw new ArgumentNullException("normalizer");
-            }
-            this.normalizer = normalizer;
+            this.normalizer = normalizer ?? throw new ArgumentNullException(nameof(normalizer));
         }
 
         // for testing ONLY
         internal ICUNormalizer2CharFilter(TextReader input, Normalizer2 normalizer, int bufferSize)
             : base(input)
         {
-            if (normalizer == null)
-            {
-                throw new ArgumentNullException("normalizer");
-            }
-            this.normalizer = normalizer;
+            this.normalizer = normalizer ?? throw new ArgumentNullException(nameof(normalizer));
             this.tmpBuffer = CharacterUtils.NewCharacterBuffer(bufferSize);
         }
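
Both constructors above now use a throw expression on the right of the null-coalescing operator, collapsing the four-line null check and the assignment into a single statement with identical behavior. A minimal sketch of the shape (names are illustrative):

    using System;

    public class NormalizingFilter
    {
        private readonly string name;

        public NormalizingFilter(string name)
        {
            // assign if non-null, otherwise throw; replaces the if (x == null) throw ... block
            this.name = name ?? throw new ArgumentNullException(nameof(name));
        }

        public override string ToString() => name;
    }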
 
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs
index 993ddf2..730bb59 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs
@@ -81,7 +81,7 @@ namespace Lucene.Net.Analysis.Icu
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
             this.normalizer = normalizer;
         }
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs
index cf11e70..190886a 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Analysis.Icu
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
             this.normalizer = normalizer;
         }
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs
index 9627804..ece0cd2 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs
@@ -49,9 +49,9 @@ namespace Lucene.Net.Analysis.Icu
             string direction = Get(args, "direction", new string[] { "forward", "reverse" }, "forward", false);
             TransliterationDirection dir = "forward".Equals(direction, StringComparison.Ordinal) ? Transliterator.Forward : Transliterator.Reverse;
             transliterator = Transliterator.GetInstance(id, dir);
-            if (args.Count != 0)
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs
index b6093cb..ef3444d 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs
@@ -131,18 +131,16 @@ namespace Lucene.Net.Analysis.Icu.Segmentation
 
         private static RuleBasedBreakIterator ReadBreakIterator(string filename)
         {
-            using (Stream @is = typeof(DefaultICUTokenizerConfig).FindAndGetManifestResourceStream(filename))
+            using Stream @is = typeof(DefaultICUTokenizerConfig).FindAndGetManifestResourceStream(filename);
+            try
             {
-                try
-                {
-                    RuleBasedBreakIterator bi =
-                        RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is);
-                    return bi;
-                }
-                catch (IOException e)
-                {
-                    throw new Exception(e.ToString(), e);
-                }
+                RuleBasedBreakIterator bi =
+                    RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is);
+                return bi;
+            }
+            catch (IOException e)
+            {
+                throw new Exception(e.ToString(), e);
             }
         }
     }
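
The change above converts a using statement into a C# 8 using declaration: the stream is still disposed, but at the end of the enclosing scope, and one level of nesting disappears. A short standalone sketch with a hypothetical file read:

    using System.IO;

    public static class UsingDeclarationDemo
    {
        public static string ReadAll(string path)
        {
            // C# 8 using declaration: 'reader' is disposed when ReadAll returns,
            // exactly as if the remainder of the method were wrapped in a using block
            using var reader = new StreamReader(path);
            return reader.ReadToEnd();
        }
    }
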
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
index f959065..aa74389 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
@@ -97,9 +97,9 @@ namespace Lucene.Net.Analysis.Icu.Segmentation
             }
             cjkAsWords = GetBoolean(args, "cjkAsWords", true);
             myanmarAsWords = GetBoolean(args, "myanmarAsWords", true);
-            if (args.Count != 0)
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs
index 99bd817..ce1e126 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs
@@ -72,9 +72,9 @@ namespace Lucene.Net.Analysis.Icu.TokenAttributes
                 return true;
             }
 
-            if (other is ScriptAttribute)
+            if (other is ScriptAttribute scriptAttribute)
             {
-                return ((ScriptAttribute)other).code == code;
+                return scriptAttribute.code == code;
             }
 
             return false;
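
The Equals rewrite above uses a declaration pattern, which performs the type test and the cast in one step instead of an 'is' check followed by a separate cast (IDE0020/IDE0038). The same shape with a hypothetical attribute type:

    public class CodeAttribute
    {
        public int Code { get; set; }

        public override bool Equals(object other)
        {
            // Before: if (other is CodeAttribute) return ((CodeAttribute)other).Code == Code;
            if (other is CodeAttribute attr)   // test and cast in one step
                return attr.Code == Code;
            return false;
        }

        public override int GetHashCode() => Code;
    }
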
diff --git a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs
index bd4c47d..527df44 100644
--- a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs
+++ b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs
@@ -59,8 +59,8 @@ namespace Lucene.Net.Collation
     [ExceptionToClassNameConvention]
     public sealed class ICUCollationKeyFilter : TokenFilter
     {
-        private Collator collator = null;
-        private RawCollationKey reusableKey = new RawCollationKey();
+        private readonly Collator collator = null; // LUCENENET: marked readonly
+        private readonly RawCollationKey reusableKey = new RawCollationKey(); // LUCENENET: marked readonly
         private readonly ICharTermAttribute termAtt;
 
         /// <summary>
diff --git a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs
index 4ba29c2..dd30193 100644
--- a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs
@@ -101,9 +101,9 @@ namespace Lucene.Net.Collation
                     + "To tailor rules for a built-in language, see the javadocs for RuleBasedCollator. "
                     + "Then save the entire customized ruleset to a file, and use with the custom parameter");
 
-            if (args.Count != 0)
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
@@ -240,12 +240,12 @@ namespace Lucene.Net.Collation
             return this;
         }
 
-        private string ToUTF8String(Stream input)
+        private static string ToUTF8String(Stream input) // LUCENENET: CA1822: Mark members as static
         {
             StringBuilder sb = new StringBuilder();
             char[] buffer = new char[1024];
             TextReader r = IOUtils.GetDecodingReader(input, Encoding.UTF8);
-            int len = 0;
+            int len; // LUCENENET: IDE0059: Remove unnecessary value assignment
             while ((len = r.Read(buffer, 0, buffer.Length)) > 0)
             {
                 sb.Append(buffer, 0, len);
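
ToUTF8String reads no instance state, so CA1822 marks it static; the int len = 0 initializer was also dead, because the while condition assigns len before it is ever read (IDE0059). Both fixes in a reduced, standalone form (names are illustrative):

    using System.IO;
    using System.Text;

    public static class StreamText
    {
        // static: the method touches no instance state (CA1822)
        public static string ReadAllText(TextReader reader)
        {
            var sb = new StringBuilder();
            char[] buffer = new char[1024];
            int len; // no "= 0": the value is always assigned before it is read (IDE0059)
            while ((len = reader.Read(buffer, 0, buffer.Length)) > 0)
            {
                sb.Append(buffer, 0, len);
            }
            return sb.ToString();
        }
    }
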
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
index 3532165..eb3c754 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
@@ -53,20 +53,20 @@ namespace Lucene.Net.Analysis.Ja.Dict
         private readonly string[] inflFormDict;
 
         // LUCENENET specific - variable to hold the name of the data directory (or empty string to load embedded resources)
-        private static readonly string DATA_DIR;
+        private static readonly string DATA_DIR = LoadDataDir();
         // LUCENENET specific - name of the subdirectory inside of the directory where the Kuromoji dictionary files reside.
-        private static readonly string DATA_SUBDIR = "kuromoji-data";
+        private const string DATA_SUBDIR = "kuromoji-data";
 
-        static BinaryDictionary()
+        private static string LoadDataDir()
         {
             // LUCENENET specific - reformatted with :, renamed from "analysis.data.dir"
             string currentPath = SystemProperties.GetProperty("kuromoji:data:dir",
 #if FEATURE_APPDOMAIN_BASEDIRECTORY
-                AppDomain.CurrentDomain.BaseDirectory
+            AppDomain.CurrentDomain.BaseDirectory
 #else
-                System.AppContext.BaseDirectory
+            System.AppContext.BaseDirectory
 #endif
-                );
+            );
 
             // If a matching directory path is found, set our DATA_DIR static
             // variable. If it is null or empty after this process, we need to
@@ -74,8 +74,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
             string candidatePath = System.IO.Path.Combine(currentPath, DATA_SUBDIR);
             if (System.IO.Directory.Exists(candidatePath))
             {
-                DATA_DIR = candidatePath;
-                return;
+                return candidatePath;
             }
 
             while (new DirectoryInfo(currentPath).Parent != null)
@@ -85,8 +84,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
                     candidatePath = System.IO.Path.Combine(new DirectoryInfo(currentPath).Parent.FullName, DATA_SUBDIR);
                     if (System.IO.Directory.Exists(candidatePath))
                     {
-                        DATA_DIR = candidatePath;
-                        return;
+                        return candidatePath;
                     }
                     currentPath = new DirectoryInfo(currentPath).Parent.FullName;
                 }
@@ -95,6 +93,8 @@ namespace Lucene.Net.Analysis.Ja.Dict
                     // ignore security errors
                 }
             }
+
+            return null; // This is the signal to load from local resources
         }
 
         protected BinaryDictionary()
@@ -103,7 +103,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
             string[] posDict = null;
             string[] inflFormDict = null;
             string[] inflTypeDict = null;
-            ByteBuffer buffer = null;
+            ByteBuffer buffer; // LUCENENET: IDE0059: Remove unnecessary value assignment
 
             using (Stream mapIS = GetResource(TARGETMAP_FILENAME_SUFFIX))
             {
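
Replacing the static constructor with a field initializer that calls a private static helper keeps the directory-probing logic in one method while letting the runtime mark the type beforefieldinit, the usual rationale behind CA1810 (initialize reference type static fields inline). A reduced sketch of the pattern; the path logic here is made up:

    using System;
    using System.IO;

    public static class DataDirLocator
    {
        // field initializer runs the helper exactly once; no static constructor required
        private static readonly string DataDir = LoadDataDir();

        public static string Current => DataDir;

        private static string LoadDataDir()
        {
            string candidate = Path.Combine(AppContext.BaseDirectory, "data");
            // null is the signal to fall back to embedded resources
            return Directory.Exists(candidate) ? candidate : null;
        }
    }
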
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
index 7e0938a..2d6b285 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
@@ -60,17 +60,15 @@ namespace Lucene.Net.Analysis.Ja.Dict
 
         private CharacterDefinition()
         {
-            using (Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX))
+            using Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX);
+            DataInput @in = new InputStreamDataInput(@is);
+            CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION);
+            @in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length);
+            for (int i = 0; i < CLASS_COUNT; i++)
             {
-                DataInput @in = new InputStreamDataInput(@is);
-                CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION);
-                @in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length);
-                for (int i = 0; i < CLASS_COUNT; i++)
-                {
-                    byte b = @in.ReadByte();
-                    invokeMap[i] = (b & 0x01) != 0;
-                    groupMap[i] = (b & 0x02) != 0;
-                }
+                byte b = @in.ReadByte();
+                invokeMap[i] = (b & 0x01) != 0;
+                groupMap[i] = (b & 0x02) != 0;
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs
index 4c24a4e..1b8b808 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs
@@ -99,7 +99,7 @@
     }
 
     // LUCENENET TODO: Make this whole thing into an abstract class??
-    public class Dictionary
+    public static class Dictionary // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         public static readonly string INTERNAL_SEPARATOR = "\u0000";
     }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
index d98f613..e42ce66 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
@@ -1,5 +1,6 @@
 using J2N.Text;
 using Lucene.Net.Analysis.Ja.Util;
+using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
@@ -43,13 +44,13 @@ namespace Lucene.Net.Analysis.Ja.Dict
         // holds readings and POS, indexed by wordid
         private readonly string[] data;
 
-        private static readonly int CUSTOM_DICTIONARY_WORD_ID_OFFSET = 100000000;
+        private const int CUSTOM_DICTIONARY_WORD_ID_OFFSET = 100000000;
 
-        public static readonly int WORD_COST = -100000;
+        public const int WORD_COST = -100000;
 
-        public static readonly int LEFT_ID = 5;
+        public const int LEFT_ID = 5;
 
-        public static readonly int RIGHT_ID = 5;
+        public const int RIGHT_ID = 5;
 
         private static readonly Regex specialChars = new Regex(@"#.*$", RegexOptions.Compiled);
         private static readonly Regex commentLine = new Regex(@"  *", RegexOptions.Compiled);
@@ -169,14 +170,14 @@ namespace Lucene.Net.Analysis.Ja.Dict
 
         public TokenInfoFST FST => fst;
 
-        private static readonly int[][] EMPTY_RESULT = new int[0][];
+        private static readonly int[][] EMPTY_RESULT = Arrays.Empty<int[]>();
 
         /// <summary>
         /// Convert Map of index and wordIdAndLength to array of {wordId, index, length}
         /// </summary>
         /// <param name="input"></param>
         /// <returns>Array of {wordId, index, length}.</returns>
-        private int[][] ToIndexArray(IDictionary<int, int[]> input)
+        private static int[][] ToIndexArray(IDictionary<int, int[]> input) // LUCENENET: CA1822: Mark members as static
         {
             List<int[]> result = new List<int[]>();
             foreach (int i in input.Keys)
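
CA1802 prefers const over static readonly for values known at compile time: the constant is baked into the call site and costs no field load or type initialization, though it also means consumers must be recompiled if the value ever changes. A small sketch (the values are illustrative):

    public static class Limits
    {
        // const: a true compile-time constant, inlined at every use site (CA1802)
        public const int WordCost = -100000;

        // static readonly remains correct when the value is computed at run time
        public static readonly int[][] EmptyResult = new int[0][];
    }
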
diff --git a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs
index 0570a17..e2c922e 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs
@@ -31,11 +31,11 @@ namespace Lucene.Net.Analysis.Ja
     /// </summary>
     public class GraphvizFormatter
     {
-        private readonly static string BOS_LABEL = "BOS";
+        private const string BOS_LABEL = "BOS";
 
-        private readonly static string EOS_LABEL = "EOS";
+        private const string EOS_LABEL = "EOS";
 
-        private readonly static string FONT_NAME = "Helvetica";
+        private const string FONT_NAME = "Helvetica";
 
         private readonly ConnectionCosts costs;
 
@@ -135,8 +135,7 @@ namespace Lucene.Net.Analysis.Ja
                     sb.Append(toNodeID);
 
                     string attrs;
-                    string path;
-                    bestPathMap.TryGetValue(fromNodeID, out path);
+                    bestPathMap.TryGetValue(fromNodeID, out string path);
                     if (toNodeID.Equals(path, StringComparison.Ordinal))
                     {
                         // This arc is on best path
@@ -193,7 +192,7 @@ namespace Lucene.Net.Analysis.Ja
             return "}";
         }
 
-        private string GetNodeID(int pos, int idx)
+        private static string GetNodeID(int pos, int idx) // LUCENENET: CA1822: Mark members as static
         {
             return pos + "." + idx;
         }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs
index 5524be7..cec2cfd 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Ja
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
index ac781db..58a60b8 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
@@ -60,10 +60,10 @@ namespace Lucene.Net.Analysis.Ja
         private const char FULL_STOP_PUNCTUATION = '\u3002';           // 。
 
         // Hiragana to dakuten map (lookup using code point - 0x30ab(か)*/
-        private static char[] h2d = new char[50];
+        private static readonly char[] h2d = new char[50]; // LUCENENET: marked readonly
 
         // Katakana to dakuten map (lookup using code point - 0x30ab(カ
-        private static char[] k2d = new char[50];
+        private static readonly char[] k2d = new char[50]; // LUCENENET: marked readonly
 
         private readonly RollingCharBuffer buffer = new RollingCharBuffer();
 
@@ -73,9 +73,9 @@ namespace Lucene.Net.Analysis.Ja
 
         private int iterationMarkSpanEndPosition = 0;
 
-        private bool normalizeKanji;
+        private readonly bool normalizeKanji; // LUCENENET: marked readonly
 
-        private bool normalizeKana;
+        private readonly bool normalizeKana; // LUCENENET: marked readonly
 
         static JapaneseIterationMarkCharFilter()
         {
@@ -327,7 +327,7 @@ namespace Lucene.Net.Analysis.Ja
         /// <param name="c">Hiragana character.</param>
         /// <param name="m">Repetition mark referring to <paramref name="c"/>.</param>
         /// <returns>Normalized character - return <paramref name="c"/> on illegal iteration marks.</returns>
-        private char NormalizedHiragana(char c, char m)
+        private static char NormalizedHiragana(char c, char m) // LUCENENET: CA1822: Mark members as static
         {
             switch (m)
             {
@@ -346,7 +346,7 @@ namespace Lucene.Net.Analysis.Ja
         /// <param name="c">Katakana character.</param>
         /// <param name="m">Repetition mark referring to <paramref name="c"/>.</param>
         /// <returns>Normalized character - return <paramref name="c"/> on illegal iteration marks.</returns>
-        private char NormalizedKatakana(char c, char m)
+        private static char NormalizedKatakana(char c, char m) // LUCENENET: CA1822: Mark members as static
         {
             switch (m)
             {
@@ -425,7 +425,7 @@ namespace Lucene.Net.Analysis.Ja
         /// </summary>
         /// <param name="c">Character to look up.</param>
         /// <returns>Hiragana dakuten variant of c or c itself if no dakuten variant exists.</returns>
-        private char LookupHiraganaDakuten(char c)
+        private static char LookupHiraganaDakuten(char c) // LUCENENET: CA1822: Mark members as static
         {
             return Lookup(c, h2d, '\u304b'); // Code point is for か
         }
@@ -435,7 +435,7 @@ namespace Lucene.Net.Analysis.Ja
         /// </summary>
         /// <param name="c">Character to look up.</param>
         /// <returns>Katakana dakuten variant of <paramref name="c"/> or <paramref name="c"/> itself if no dakuten variant exists.</returns>
-        private char LookupKatakanaDakuten(char c)
+        private static char LookupKatakanaDakuten(char c) // LUCENENET: CA1822: Mark members as static
         {
             return Lookup(c, k2d, '\u30ab'); // Code point is for カ
         }
@@ -445,7 +445,7 @@ namespace Lucene.Net.Analysis.Ja
         /// </summary>
         /// <param name="c">Character to check.</param>
         /// <returns><c>true</c> if c is a hiragana dakuten and otherwise <c>false</c>.</returns>
-        private bool IsHiraganaDakuten(char c)
+        private static bool IsHiraganaDakuten(char c) // LUCENENET: CA1822: Mark members as static
         {
             return Inside(c, h2d, '\u304b') && c == LookupHiraganaDakuten(c);
         }
@@ -455,7 +455,7 @@ namespace Lucene.Net.Analysis.Ja
         /// </summary>
         /// <param name="c">Character to check.</param>
         /// <returns><c>true</c> if c is a hiragana dakuten and otherwise <c>false</c>.</returns>
-        private bool IsKatakanaDakuten(char c)
+        private static bool IsKatakanaDakuten(char c) // LUCENENET: CA1822: Mark members as static
         {
             return Inside(c, k2d, '\u30ab') && c == LookupKatakanaDakuten(c);
         }
@@ -468,7 +468,7 @@ namespace Lucene.Net.Analysis.Ja
         /// <param name="map">Dakuten map.</param>
         /// <param name="offset">Code point offset from <paramref name="c"/>.</param>
         /// <returns>Mapped character or <paramref name="c"/> if no mapping exists.</returns>
-        private char Lookup(char c, char[] map, char offset)
+        private static char Lookup(char c, char[] map, char offset) // LUCENENET: CA1822: Mark members as static
         {
             if (!Inside(c, map, offset))
             {
@@ -487,7 +487,7 @@ namespace Lucene.Net.Analysis.Ja
         /// <param name="map">Dakuten map.</param>
         /// <param name="offset">Code point offset from <paramref name="c"/>.</param>
         /// <returns><c>true</c> if <paramref name="c"/> is mapped by map and otherwise <c>false</c>.</returns>
-        private bool Inside(char c, char[] map, char offset)
+        private static bool Inside(char c, char[] map, char offset) // LUCENENET: CA1822: Mark members as static
         {
             return c >= offset && c < offset + map.Length;
         }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs
index c9518c9..2550784 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs
@@ -35,8 +35,8 @@ namespace Lucene.Net.Analysis.Ja
     /// </summary>
     public class JapaneseIterationMarkCharFilterFactory : CharFilterFactory, IMultiTermAwareComponent
     {
-        private static readonly string NORMALIZE_KANJI_PARAM = "normalizeKanji";
-        private static readonly string NORMALIZE_KANA_PARAM = "normalizeKana";
+        private const string NORMALIZE_KANJI_PARAM = "normalizeKanji";
+        private const string NORMALIZE_KANA_PARAM = "normalizeKana";
 
         private readonly bool normalizeKanji;
         private readonly bool normalizeKana;
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Ja
             normalizeKana = GetBoolean(args, NORMALIZE_KANA_PARAM, JapaneseIterationMarkCharFilter.NORMALIZE_KANA_DEFAULT);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs
index 857e5bf..b250fdb 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs
@@ -37,8 +37,8 @@ namespace Lucene.Net.Analysis.Ja
     /// </remarks>
     public sealed class JapaneseKatakanaStemFilter : TokenFilter
     {
-        public readonly static int DEFAULT_MINIMUM_LENGTH = 4;
-        private readonly static char HIRAGANA_KATAKANA_PROLONGED_SOUND_MARK = '\u30fc';
+        public const int DEFAULT_MINIMUM_LENGTH = 4;
+        private const char HIRAGANA_KATAKANA_PROLONGED_SOUND_MARK = '\u30fc';
 
         private readonly ICharTermAttribute termAttr;
         private readonly IKeywordAttribute keywordAttr;
@@ -95,7 +95,7 @@ namespace Lucene.Net.Analysis.Ja
             return length;
         }
 
-        private bool IsKatakana(char[] term, int length)
+        private static bool IsKatakana(char[] term, int length) // LUCENENET: CA1822: Mark members as static
         {
             for (int i = 0; i < length; i++)
             {
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs
index af2acb5..ec322ee 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Ja
     /// </summary>
     public class JapaneseKatakanaStemFilterFactory : TokenFilterFactory
     {
-        private static readonly string MINIMUM_LENGTH_PARAM = "minimumLength";
+        private const string MINIMUM_LENGTH_PARAM = "minimumLength";
         private readonly int minimumLength;
 
         /// <summary>Creates a new <see cref="JapaneseKatakanaStemFilterFactory"/></summary>
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Ja
             }
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs
index f52c550..c66f7c3 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.Ja
             enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs
index b2e1542..3cc563f 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs
@@ -32,8 +32,8 @@ namespace Lucene.Net.Analysis.Ja
         private readonly ICharTermAttribute termAttr;
         private readonly IReadingAttribute readingAttr;
 
-        private StringBuilder buffer = new StringBuilder();
-        private bool useRomaji;
+        private readonly StringBuilder buffer = new StringBuilder(); // LUCENENET: marked readonly
+        private readonly bool useRomaji; // LUCENENET: marked readonly
 
         public JapaneseReadingFormFilter(TokenStream input, bool useRomaji)
             : base(input)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs
index 9464c2e..705b656 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Ja
     /// </summary>
     public class JapaneseReadingFormFilterFactory : TokenFilterFactory
     {
-        private static readonly string ROMAJI_PARAM = "useRomaji";
+        private const string ROMAJI_PARAM = "useRomaji";
         private readonly bool useRomaji;
 
         /// <summary>Creates a new <see cref="JapaneseReadingFormFilterFactory"/>.</summary>
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.Ja
             useRomaji = GetBoolean(args, ROMAJI_PARAM, false);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
index 2c59571..a635e98 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
@@ -65,19 +65,21 @@ namespace Lucene.Net.Analysis.Ja
         // LUCENENET specific: de-nested Type and renamed JapaneseTokenizerType
 
 
-        private static readonly bool VERBOSE = false;
+#pragma warning disable CA1802 // Use literals where appropriate
+        private static readonly bool VERBOSE = false; // For debugging
+#pragma warning restore CA1802 // Use literals where appropriate
 
-        private static readonly int SEARCH_MODE_KANJI_LENGTH = 2;
+        private const int SEARCH_MODE_KANJI_LENGTH = 2;
 
-        private static readonly int SEARCH_MODE_OTHER_LENGTH = 7; // Must be >= SEARCH_MODE_KANJI_LENGTH
+        private const int SEARCH_MODE_OTHER_LENGTH = 7; // Must be >= SEARCH_MODE_KANJI_LENGTH
 
-        private static readonly int SEARCH_MODE_KANJI_PENALTY = 3000;
+        private const int SEARCH_MODE_KANJI_PENALTY = 3000;
 
-        private static readonly int SEARCH_MODE_OTHER_PENALTY = 1700;
+        private const int SEARCH_MODE_OTHER_PENALTY = 1700;
 
         // For safety:
-        private static readonly int MAX_UNKNOWN_WORD_LENGTH = 1024;
-        private static readonly int MAX_BACKTRACE_GAP = 1024;
+        private const int MAX_UNKNOWN_WORD_LENGTH = 1024;
+        private const int MAX_BACKTRACE_GAP = 1024;
 
         private readonly IDictionary<JapaneseTokenizerType, IDictionary> dictionaryMap = new Dictionary<JapaneseTokenizerType, IDictionary>();
 
@@ -1177,8 +1179,7 @@ namespace Lucene.Net.Analysis.Ja
 
         internal IDictionary GetDict(JapaneseTokenizerType type)
         {
-            IDictionary result;
-            dictionaryMap.TryGetValue(type, out result);
+            dictionaryMap.TryGetValue(type, out IDictionary result);
             return result;
         }
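
    The CA1802 conversions above (static readonly fields holding compile-time constants becoming const, while the VERBOSE debug flag stays a suppressed static readonly) come down to how the two are compiled. A minimal illustrative sketch with hypothetical names, not taken from the Lucene.NET sources:

        internal static class ConstVersusReadonlySketch
        {
            // const: a compile-time literal. The value is baked into every call site,
            // so there is no field load at runtime.
            private const int SearchModeKanjiLength = 2;

            // static readonly: a real field initialized when the type is loaded and
            // read at runtime. Useful when the value is computed, or when it is
            // deliberately kept as a field (e.g. a debug flag flipped while hacking).
            private static readonly bool Verbose = false;

            public static bool IsLongKanjiRun(int length)
            {
                if (Verbose) System.Console.WriteLine($"checking length {length}");
                return length >= SearchModeKanjiLength;
            }
        }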
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs
index 738226a..0ba2c8d 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs
@@ -43,13 +43,13 @@ namespace Lucene.Net.Analysis.Ja
     /// </summary>
     public class JapaneseTokenizerFactory : TokenizerFactory, IResourceLoaderAware
     {
-        private static readonly string MODE = "mode";
+        private const string MODE = "mode";
 
-        private static readonly string USER_DICT_PATH = "userDictionary";
+        private const string USER_DICT_PATH = "userDictionary";
 
-        private static readonly string USER_DICT_ENCODING = "userDictionaryEncoding";
+        private const string USER_DICT_ENCODING = "userDictionaryEncoding";
 
-        private static readonly string DISCARD_PUNCTUATION = "discardPunctuation"; // Expert option
+        private const string DISCARD_PUNCTUATION = "discardPunctuation"; // Expert option
 
         private UserDictionary userDictionary;
 
@@ -68,7 +68,7 @@ namespace Lucene.Net.Analysis.Ja
             discardPunctuation = GetBoolean(args, DISCARD_PUNCTUATION, true);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs
index e9c82c8..4df05e4 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Analysis.Ja.TokenAttributes
 
         public virtual string GetBaseForm()
         {
-            return token == null ? null : token.GetBaseForm();
+            return token?.GetBaseForm();
         }
 
         public virtual void SetToken(Token token)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs
index fe0dae8..9f0bf2c 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs
@@ -32,12 +32,12 @@ namespace Lucene.Net.Analysis.Ja.TokenAttributes
 
         public virtual string GetInflectionType()
         {
-            return token == null ? null : token.GetInflectionType();
+            return token?.GetInflectionType();
         }
 
         public virtual string GetInflectionForm()
         {
-            return token == null ? null : token.GetInflectionForm();
+            return token?.GetInflectionForm();
         }
 
         public virtual void SetToken(Token token)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs
index c043511..cf589c1 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Analysis.Ja.TokenAttributes
 
         public virtual string GetPartOfSpeech()
         {
-            return token == null ? null : token.GetPartOfSpeech();
+            return token?.GetPartOfSpeech();
         }
 
         public virtual void SetToken(Token token)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs
index 6697598..6cb656f 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs
@@ -32,12 +32,12 @@ namespace Lucene.Net.Analysis.Ja.TokenAttributes
 
         public virtual string GetReading()
         {
-            return token == null ? null : token.GetReading();
+            return token?.GetReading();
         }
 
         public virtual string GetPronunciation()
         {
-            return token == null ? null : token.GetPronunciation();
+            return token?.GetPronunciation();
         }
 
         public virtual void SetToken(Token token)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
index 14bfb11..9843dc7 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         private int[] targetMapOffsets = new int[8192];
         private readonly List<string> posDict = new List<string>();
 
-        public BinaryDictionaryWriter(Type implClazz, int size)
+        protected BinaryDictionaryWriter(Type implClazz, int size) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
         {
             this.m_implClazz = implClazz;
             m_buffer = ByteBuffer.Allocate(size);
@@ -296,59 +296,55 @@ namespace Lucene.Net.Analysis.Ja.Util
         {
             //new File(filename).getParentFile().mkdirs();
             System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
-            using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
+            using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
+            DataOutput @out = new OutputStreamDataOutput(os);
+            CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION);
+
+            int numSourceIds = lastSourceId + 1;
+            @out.WriteVInt32(targetMapEndOffset); // <-- size of main array
+            @out.WriteVInt32(numSourceIds + 1); // <-- size of offset array (+ 1 more entry)
+            int prev = 0, sourceId = 0;
+            for (int ofs = 0; ofs < targetMapEndOffset; ofs++)
             {
-                DataOutput @out = new OutputStreamDataOutput(os);
-                CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION);
-
-                int numSourceIds = lastSourceId + 1;
-                @out.WriteVInt32(targetMapEndOffset); // <-- size of main array
-                @out.WriteVInt32(numSourceIds + 1); // <-- size of offset array (+ 1 more entry)
-                int prev = 0, sourceId = 0;
-                for (int ofs = 0; ofs < targetMapEndOffset; ofs++)
+                int val = targetMap[ofs], delta = val - prev;
+                if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0);
+                if (ofs == targetMapOffsets[sourceId])
                 {
-                    int val = targetMap[ofs], delta = val - prev;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0);
-                    if (ofs == targetMapOffsets[sourceId])
-                    {
-                        @out.WriteVInt32((delta << 1) | 0x01);
-                        sourceId++;
-                    }
-                    else
-                    {
-                        @out.WriteVInt32((delta << 1));
-                    }
-                    prev += delta;
+                    @out.WriteVInt32((delta << 1) | 0x01);
+                    sourceId++;
+                }
+                else
+                {
+                    @out.WriteVInt32((delta << 1));
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(sourceId == numSourceIds,"sourceId:{0} != numSourceIds:{1}", sourceId, numSourceIds);
+                prev += delta;
             }
+            if (Debugging.AssertsEnabled) Debugging.Assert(sourceId == numSourceIds, "sourceId:{0} != numSourceIds:{1}", sourceId, numSourceIds);
         }
 
         protected virtual void WritePosDict(string filename)
         {
             //new File(filename).getParentFile().mkdirs();
             System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
-            using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
+            using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
+            DataOutput @out = new OutputStreamDataOutput(os);
+            CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION);
+            @out.WriteVInt32(posDict.Count);
+            foreach (string s in posDict)
             {
-                DataOutput @out = new OutputStreamDataOutput(os);
-                CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION);
-                @out.WriteVInt32(posDict.Count);
-                foreach (string s in posDict)
+                if (s == null)
                 {
-                    if (s == null)
-                    {
-                        @out.WriteByte((byte)0);
-                        @out.WriteByte((byte)0);
-                        @out.WriteByte((byte)0);
-                    }
-                    else
-                    {
-                        string[] data = CSVUtil.Parse(s);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(data.Length == 3,"malformed pos/inflection: {0}", s);
-                        @out.WriteString(data[0]);
-                        @out.WriteString(data[1]);
-                        @out.WriteString(data[2]);
-                    }
+                    @out.WriteByte((byte)0);
+                    @out.WriteByte((byte)0);
+                    @out.WriteByte((byte)0);
+                }
+                else
+                {
+                    string[] data = CSVUtil.Parse(s);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(data.Length == 3, "malformed pos/inflection: {0}", s);
+                    @out.WriteString(data[0]);
+                    @out.WriteString(data[1]);
+                    @out.WriteString(data[2]);
                 }
             }
         }
@@ -357,24 +353,22 @@ namespace Lucene.Net.Analysis.Ja.Util
         {
             //new File(filename).getParentFile().mkdirs();
             System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
-            using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
-            {
-                DataOutput @out = new OutputStreamDataOutput(os);
-                CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION);
-                @out.WriteVInt32(m_buffer.Position);
+            using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
+            DataOutput @out = new OutputStreamDataOutput(os);
+            CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION);
+            @out.WriteVInt32(m_buffer.Position);
 
-                //WritableByteChannel channel = Channels.newChannel(os);
-                // Write Buffer
-                m_buffer.Flip();  // set position to 0, set limit to current position
-                //channel.write(buffer);
+            //WritableByteChannel channel = Channels.newChannel(os);
+            // Write Buffer
+            m_buffer.Flip();  // set position to 0, set limit to current position
+                              //channel.write(buffer);
 
-                while (m_buffer.HasRemaining)
-                {
-                    @out.WriteByte(m_buffer.Get());
-                }
-
-                if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer.Remaining == 0L);
+            while (m_buffer.HasRemaining)
+            {
+                @out.WriteByte(m_buffer.Get());
             }
+
+            if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer.Remaining == 0L);
         }
     }
 }
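    The Write() refactorings above (and several below) replace using statements with C# 8 using declarations, which drop one level of nesting while keeping the same deterministic disposal. A small illustrative sketch, not taken from the Lucene.NET sources:

        using System.IO;

        internal static class UsingDeclarationSketch
        {
            // using statement: disposal is tied to the braced block.
            public static void WriteWithStatement(string path, byte[] payload)
            {
                using (Stream os = new FileStream(path, FileMode.Create, FileAccess.Write))
                {
                    os.Write(payload, 0, payload.Length);
                } // os.Dispose() runs here
            }

            // C# 8 using declaration: same guarantee, less indentation; os.Dispose()
            // runs when control leaves the enclosing method, even on an exception.
            public static void WriteWithDeclaration(string path, byte[] payload)
            {
                using Stream os = new FileStream(path, FileMode.Create, FileAccess.Write);
                os.Write(payload, 0, payload.Length);
            }
        }
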
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
index 9f019fb..d963d12 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
@@ -58,7 +58,9 @@ namespace Lucene.Net.Analysis.Ja.Util
             characterCategoryMap[codePoint] = CharacterDefinition.LookupCharacterClass(characterClassName);
         }
 
+#pragma warning disable IDE0060 // Remove unused parameter
         public void PutInvokeDefinition(string characterClassName, int invoke, int group, int length)
+#pragma warning restore IDE0060 // Remove unused parameter
         {
             byte characterClass = CharacterDefinition.LookupCharacterClass(characterClassName);
             invokeMap[characterClass] = invoke == 1;
@@ -77,19 +79,17 @@ namespace Lucene.Net.Analysis.Ja.Util
             string filename = System.IO.Path.Combine(baseDir, typeof(CharacterDefinition).Name + CharacterDefinition.FILENAME_SUFFIX);
             //new File(filename).getParentFile().mkdirs();
             System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(baseDir));
-            using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
+            using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
+            DataOutput @out = new OutputStreamDataOutput(os);
+            CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION);
+            @out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length);
+            for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++)
             {
-                DataOutput @out = new OutputStreamDataOutput(os);
-                CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION);
-                @out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length);
-                for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++)
-                {
-                    byte b = (byte)(
-                      (invokeMap[i] ? 0x01 : 0x00) |
-                      (groupMap[i] ? 0x02 : 0x00)
-                    );
-                    @out.WriteByte(b);
-                }
+                byte b = (byte)(
+                  (invokeMap[i] ? 0x01 : 0x00) |
+                  (groupMap[i] ? 0x02 : 0x00)
+                );
+                @out.WriteByte(b);
             }
         }
     }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
index 1d7d8e3..d15ddb9 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
@@ -24,46 +24,40 @@ namespace Lucene.Net.Analysis.Ja.Util
      * limitations under the License.
      */
 
-    public class ConnectionCostsBuilder
+    public static class ConnectionCostsBuilder // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         private static readonly Regex whiteSpaceRegex = new Regex("\\s+", RegexOptions.Compiled);
 
-        private ConnectionCostsBuilder()
-        {
-        }
-
         public static ConnectionCostsWriter Build(string filename)
         {
-            using (Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
-            {
-                StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII);
+            using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read);
+            StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII);
 
-                string line = streamReader.ReadLine();
-                string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd();
+            string line = streamReader.ReadLine();
+            string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd();
 
-                if (Debugging.AssertsEnabled) Debugging.Assert(dimensions.Length == 2);
+            if (Debugging.AssertsEnabled) Debugging.Assert(dimensions.Length == 2);
 
-                int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture);
-                int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture);
+            int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture);
+            int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture);
 
-                if (Debugging.AssertsEnabled) Debugging.Assert(forwardSize > 0 && backwardSize > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(forwardSize > 0 && backwardSize > 0);
 
-                ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize);
+            ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize);
 
-                while ((line = streamReader.ReadLine()) != null)
-                {
-                    string[] fields = whiteSpaceRegex.Split(line).TrimEnd();
+            while ((line = streamReader.ReadLine()) != null)
+            {
+                string[] fields = whiteSpaceRegex.Split(line).TrimEnd();
 
-                    if (Debugging.AssertsEnabled) Debugging.Assert(fields.Length == 3);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fields.Length == 3);
 
-                    int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture);
-                    int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture);
-                    int cost = int.Parse(fields[2], CultureInfo.InvariantCulture);
+                int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture);
+                int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture);
+                int cost = int.Parse(fields[2], CultureInfo.InvariantCulture);
 
-                    costs.Add(forwardId, backwardId, cost);
-                }
-                return costs;
+                costs.Add(forwardId, backwardId, cost);
             }
+            return costs;
         }
     }
 }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
index 4b5fdb4..936999d 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
@@ -56,23 +56,21 @@ namespace Lucene.Net.Analysis.Ja.Util
             string filename = System.IO.Path.Combine(baseDir, typeof(ConnectionCosts).Name + CharacterDefinition.FILENAME_SUFFIX);
             //new File(filename).getParentFile().mkdirs();
             System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
-            using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
+            using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
+            DataOutput @out = new OutputStreamDataOutput(os);
+            CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION);
+            @out.WriteVInt32(forwardSize);
+            @out.WriteVInt32(backwardSize);
+            int last = 0;
+            if (Debugging.AssertsEnabled) Debugging.Assert(costs.Length == backwardSize);
+            foreach (short[] a in costs)
             {
-                DataOutput @out = new OutputStreamDataOutput(os);
-                CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION);
-                @out.WriteVInt32(forwardSize);
-                @out.WriteVInt32(backwardSize);
-                int last = 0;
-                if (Debugging.AssertsEnabled) Debugging.Assert(costs.Length == backwardSize);
-                foreach (short[] a in costs)
+                if (Debugging.AssertsEnabled) Debugging.Assert(a.Length == forwardSize);
+                for (int i = 0; i < a.Length; i++)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(a.Length == forwardSize);
-                    for (int i = 0; i < a.Length; i++)
-                    {
-                        int delta = (int)a[i] - last;
-                        @out.WriteVInt32((delta >> 31) ^ (delta << 1));
-                        last = a[i];
-                    }
+                    int delta = (int)a[i] - last;
+                    @out.WriteVInt32((delta >> 31) ^ (delta << 1));
+                    last = a[i];
                 }
             }
         }
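
    The (delta >> 31) ^ (delta << 1) expression preserved above is a zigzag encoding of the signed cost deltas: small negative and positive deltas map to small non-negative values, so WriteVInt32 stores them in few bytes. An illustrative round-trip follows; the decode helper is added here only for demonstration and is not part of this commit.

        using System;

        internal static class ZigZagSketch
        {
            // Same transform as above: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
            public static int Encode(int delta) => (delta >> 31) ^ (delta << 1);

            // Inverse mapping (illustrative only).
            public static int Decode(int encoded) => (int)((uint)encoded >> 1) ^ -(encoded & 1);

            public static void Main()
            {
                foreach (int delta in new[] { 0, -1, 1, -2, 2, -1000, 1000 })
                    Console.WriteLine($"{delta,6} -> encoded {Encode(delta),4} -> decoded {Decode(Encode(delta))}");
            }
        }
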
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs
index d6d6b43..83fd993 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs
@@ -20,14 +20,10 @@ namespace Lucene.Net.Analysis.Ja.Util
      * limitations under the License.
      */
 
-    public class DictionaryBuilder
+    public static class DictionaryBuilder // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         public enum DictionaryFormat { IPADIC, UNIDIC };
 
-        private DictionaryBuilder()
-        {
-        }
-
         static DictionaryBuilder()
         {
 #if FEATURE_ENCODINGPROVIDERS
@@ -47,16 +43,16 @@ namespace Lucene.Net.Analysis.Ja.Util
             TokenInfoDictionaryBuilder tokenInfoBuilder = new TokenInfoDictionaryBuilder(format, encoding, normalizeEntry);
             TokenInfoDictionaryWriter tokenInfoDictionary = tokenInfoBuilder.Build(inputDirname);
             tokenInfoDictionary.Write(outputDirname);
-            tokenInfoDictionary = null;
-            tokenInfoBuilder = null;
+            //tokenInfoDictionary = null; // LUCENENET: IDE0059: Remove unnecessary value assignment
+            //tokenInfoBuilder = null; // LUCENENET: IDE0059: Remove unnecessary value assignment
             Console.WriteLine("done");
 
             Console.WriteLine("building unknown word dict...");
             UnknownDictionaryBuilder unkBuilder = new UnknownDictionaryBuilder(encoding);
             UnknownDictionaryWriter unkDictionary = unkBuilder.Build(inputDirname);
             unkDictionary.Write(outputDirname);
-            unkDictionary = null;
-            unkBuilder = null;
+            //unkDictionary = null; // LUCENENET: IDE0059: Remove unnecessary value assignment
+            //unkBuilder = null; // LUCENENET: IDE0059: Remove unnecessary value assignment
             Console.WriteLine("done");
 
             Console.WriteLine("building connection costs...");
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
index 31ccf90..8562f01 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
@@ -33,12 +33,12 @@ namespace Lucene.Net.Analysis.Ja.Util
         /// <summary>Internal word id - incrementally assigned as entries are read and added. This will be byte offset of dictionary file</summary>
         private int offset = 0;
 
-        private string encoding = "euc-jp";
+        private readonly string encoding = "euc-jp"; // LUCENENET: marked readonly
 
-        private bool normalizeEntries = false;
+        private readonly bool normalizeEntries = false; // LUCENENET: marked readonly
         //private Normalizer2 normalizer;
 
-        private DictionaryBuilder.DictionaryFormat format = DictionaryBuilder.DictionaryFormat.IPADIC;
+        private readonly DictionaryBuilder.DictionaryFormat format = DictionaryBuilder.DictionaryFormat.IPADIC; // LUCENENET: marked readonly
 
         public TokenInfoDictionaryBuilder(DictionaryBuilder.DictionaryFormat format, string encoding, bool normalizeEntries)
         {
@@ -68,43 +68,41 @@ namespace Lucene.Net.Analysis.Ja.Util
             List<string[]> lines = new List<string[]>(400000);
             foreach (string file in csvFiles)
             {
-                using (Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read))
+                using Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read);
+                Encoding decoder = Encoding.GetEncoding(encoding);
+                TextReader reader = new StreamReader(inputStream, decoder);
+
+                string line = null;
+                while ((line = reader.ReadLine()) != null)
                 {
-                    Encoding decoder = Encoding.GetEncoding(encoding);
-                    TextReader reader = new StreamReader(inputStream, decoder);
+                    string[] entry = CSVUtil.Parse(line);
 
-                    string line = null;
-                    while ((line = reader.ReadLine()) != null)
+                    if (entry.Length < 13)
                     {
-                        string[] entry = CSVUtil.Parse(line);
+                        Console.WriteLine("Entry in CSV is not valid: " + line);
+                        continue;
+                    }
+
+                    string[] formatted = FormatEntry(entry);
+                    lines.Add(formatted);
 
-                        if (entry.Length < 13)
+                    // NFKC normalize dictionary entry
+                    if (normalizeEntries)
+                    {
+                        //if (normalizer.isNormalized(entry[0])){
+                        if (entry[0].IsNormalized(NormalizationForm.FormKC))
                         {
-                            Console.WriteLine("Entry in CSV is not valid: " + line);
                             continue;
                         }
-
-                        string[] formatted = FormatEntry(entry);
-                        lines.Add(formatted);
-
-                        // NFKC normalize dictionary entry
-                        if (normalizeEntries)
+                        string[] normalizedEntry = new string[entry.Length];
+                        for (int i = 0; i < entry.Length; i++)
                         {
-                            //if (normalizer.isNormalized(entry[0])){
-                            if (entry[0].IsNormalized(NormalizationForm.FormKC))
-                            {
-                                continue;
-                            }
-                            string[] normalizedEntry = new string[entry.Length];
-                            for (int i = 0; i < entry.Length; i++)
-                            {
-                                //normalizedEntry[i] = normalizer.normalize(entry[i]);
-                                normalizedEntry[i] = entry[i].Normalize(NormalizationForm.FormKC);
-                            }
-
-                            formatted = FormatEntry(normalizedEntry);
-                            lines.Add(formatted);
+                            //normalizedEntry[i] = normalizer.normalize(entry[i]);
+                            normalizedEntry[i] = entry[i].Normalize(NormalizationForm.FormKC);
                         }
+
+                        formatted = FormatEntry(normalizedEntry);
+                        lines.Add(formatted);
                     }
                 }
             }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
index 61e30f2..060ce8a 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
@@ -89,53 +89,51 @@ namespace Lucene.Net.Analysis.Ja.Util
 
         public virtual void ReadCharacterDefinition(string filename, UnknownDictionaryWriter dictionary)
         {
-            using (Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
-            using (TextReader reader = new StreamReader(inputStream, Encoding.GetEncoding(encoding)))
+            using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read);
+            using TextReader reader = new StreamReader(inputStream, Encoding.GetEncoding(encoding));
+            string line = null;
+
+            while ((line = reader.ReadLine()) != null)
             {
-                string line = null;
+                line = Regex.Replace(line, "^\\s", "");
+                line = Regex.Replace(line, "\\s*#.*", "");
+                line = Regex.Replace(line, "\\s+", " ");
 
-                while ((line = reader.ReadLine()) != null)
+                // Skip empty line or comment line
+                if (line.Length == 0)
                 {
-                    line = Regex.Replace(line, "^\\s", "");
-                    line = Regex.Replace(line, "\\s*#.*", "");
-                    line = Regex.Replace(line, "\\s+", " ");
+                    continue;
+                }
+
+                if (line.StartsWith("0x", StringComparison.Ordinal))
+                {  // Category mapping
+                    string[] values = new Regex(" ").Split(line, 2);  // Split only first space
 
-                    // Skip empty line or comment line
-                    if (line.Length == 0)
+                    if (!values[0].Contains(".."))
                     {
-                        continue;
+                        int cp = Convert.ToInt32(values[0], 16);
+                        dictionary.PutCharacterCategory(cp, values[1]);
                     }
+                    else
+                    {
+                        string[] codePoints = Regex.Split(values[0], "\\.\\.").TrimEnd();
+                        int cpFrom = Convert.ToInt32(codePoints[0], 16);
+                        int cpTo = Convert.ToInt32(codePoints[1], 16);
 
-                    if (line.StartsWith("0x", StringComparison.Ordinal))
-                    {  // Category mapping
-                        string[] values = new Regex(" ").Split(line, 2);  // Split only first space
-
-                        if (!values[0].Contains(".."))
-                        {
-                            int cp = Convert.ToInt32(values[0], 16);
-                            dictionary.PutCharacterCategory(cp, values[1]);
-                        }
-                        else
+                        for (int i = cpFrom; i <= cpTo; i++)
                         {
-                            string[] codePoints = Regex.Split(values[0], "\\.\\.").TrimEnd();
-                            int cpFrom = Convert.ToInt32(codePoints[0], 16);
-                            int cpTo = Convert.ToInt32(codePoints[1], 16);
-
-                            for (int i = cpFrom; i <= cpTo; i++)
-                            {
-                                dictionary.PutCharacterCategory(i, values[1]);
-                            }
+                            dictionary.PutCharacterCategory(i, values[1]);
                         }
                     }
-                    else
-                    {  // Invoke definition
-                        string[] values = line.Split(' ').TrimEnd(); // Consecutive space is merged above
-                        string characterClassName = values[0];
-                        int invoke = int.Parse(values[1], CultureInfo.InvariantCulture);
-                        int group = int.Parse(values[2], CultureInfo.InvariantCulture);
-                        int length = int.Parse(values[3], CultureInfo.InvariantCulture);
-                        dictionary.PutInvokeDefinition(characterClassName, invoke, group, length);
-                    }
+                }
+                else
+                {  // Invoke definition
+                    string[] values = line.Split(' ').TrimEnd(); // Consecutive space is merged above
+                    string characterClassName = values[0];
+                    int invoke = int.Parse(values[1], CultureInfo.InvariantCulture);
+                    int group = int.Parse(values[2], CultureInfo.InvariantCulture);
+                    int length = int.Parse(values[3], CultureInfo.InvariantCulture);
+                    dictionary.PutInvokeDefinition(characterClassName, invoke, group, length);
                 }
             }
         }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs b/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs
index 95e2703..f88421c 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs
@@ -128,8 +128,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         /// </summary>
         public static string GetPOSTranslation(string s)
         {
-            string result;
-            posTranslations.TryGetValue(s, out result);
+            posTranslations.TryGetValue(s, out string result);
             return result;
         }
 
@@ -202,8 +201,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         /// </summary>
         public static string GetInflectionTypeTranslation(string s)
         {
-            string result;
-            inflTypeTranslations.TryGetValue(s, out result);
+            inflTypeTranslations.TryGetValue(s, out string result);
             return result;
         }
 
@@ -246,8 +244,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         /// </summary>
         public static string GetInflectedFormTranslation(string s)
         {
-            string result;
-            inflFormTranslations.TryGetValue(s, out result);
+            inflFormTranslations.TryGetValue(s, out string result);
             return result;
         }
 
diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs
index 5feef4a..5f1b573 100644
--- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs
@@ -71,9 +71,9 @@ namespace Lucene.Net.Analysis.Morfologik
 
             resourceName = Get(args, DICTIONARY_ATTRIBUTE);
 
-            if (args.Count != 0)
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
@@ -86,11 +86,9 @@ namespace Lucene.Net.Analysis.Morfologik
             }
             else
             {
-                using (Stream dict = loader.OpenResource(resourceName))
-                using (Stream meta = loader.OpenResource(DictionaryMetadata.GetExpectedMetadataFileName(resourceName)))
-                {
-                    this.dictionary = Dictionary.Read(dict, meta);
-                }
+                using Stream dict = loader.OpenResource(resourceName);
+                using Stream meta = loader.OpenResource(DictionaryMetadata.GetExpectedMetadataFileName(resourceName));
+                this.dictionary = Dictionary.Read(dict, meta);
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs
index 2e15a3d..056a790 100644
--- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs
+++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs
@@ -59,9 +59,10 @@ namespace Lucene.Net.Analysis.Morfologik.TokenAttributes
 
         public override bool Equals(object other)
         {
-            if (other is IMorphosyntacticTagsAttribute)
+            if (other is null) return false;
+            if (other is IMorphosyntacticTagsAttribute morphosyntacticTagsAttribute)
             {
-                return Equal(this.Tags, ((IMorphosyntacticTagsAttribute)other).Tags);
+                return Equal(this.Tags, morphosyntacticTagsAttribute.Tags);
             }
             return false;
         }
diff --git a/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs b/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs
index 6e3ecc8..1e97edb 100644
--- a/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs
@@ -66,13 +66,11 @@ namespace Lucene.Net.Analysis.Uk
                         LuceneVersion.LUCENE_CURRENT);
 #pragma warning restore 612, 618
                 }
-#pragma warning disable 168
                 catch (IOException ex)
-#pragma warning restore 168
                 {
                     // default set should always be present as it is part of the
                     // distribution (JAR)
-                    throw new Exception("Unable to load default stopword set");
+                    throw new Exception("Unable to load default stopword set", ex);
                 }
             }
         }
@@ -164,9 +162,9 @@ namespace Lucene.Net.Analysis.Uk
                 // (see https://search.maven.org/search?q=a:morfologik-ukrainian-search). However, we are embedding the file in .NET.
                 // Since it doesn't appear to be updated frequently, this should be okay.
                 string dictFile = "ukrainian.dict";
-                using (var dictStream = type.FindAndGetManifestResourceStream(dictFile))
-                using (var metadataStream = type.FindAndGetManifestResourceStream(DictionaryMetadata.GetExpectedMetadataFileName(dictFile)))
-                    return Dictionary.Read(dictStream, metadataStream);
+                using var dictStream = type.FindAndGetManifestResourceStream(dictFile);
+                using var metadataStream = type.FindAndGetManifestResourceStream(DictionaryMetadata.GetExpectedMetadataFileName(dictFile));
+                return Dictionary.Read(dictStream, metadataStream);
             }
             catch (IOException e)
             {
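
    The catch-block change earlier in this file passes the caught IOException as the inner exception instead of discarding it, so the original failure stays attached to the rethrown exception. A minimal illustrative sketch with hypothetical names, not from this commit:

        using System;
        using System.IO;

        internal static class ExceptionChainingSketch
        {
            public static string LoadResource(string path)
            {
                try
                {
                    return File.ReadAllText(path);
                }
                catch (IOException ex)
                {
                    // Supplying ex as the innerException keeps its message and stack
                    // trace reachable via the InnerException property of the wrapper.
                    throw new Exception("Unable to load default stopword set", ex);
                }
            }
        }
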
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs
index 09c1b14..53d3750 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analysis.OpenNlp
             chunkerModelFile = Get(args, CHUNKER_MODEL);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs
index e981e25..a15b6f8 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Analysis.OpenNlp
         private readonly ITypeAttribute typeAtt;
         private readonly IKeywordAttribute keywordAtt;
         private readonly IFlagsAttribute flagsAtt;
-        private IList<AttributeSource> sentenceTokenAttrs = new List<AttributeSource>();
+        private readonly IList<AttributeSource> sentenceTokenAttrs = new List<AttributeSource>(); // LUCENENET: marked readonly
         private IEnumerator<AttributeSource> sentenceTokenAttrsIter = null;
         private bool moreTokensAvailable = true;
         private string[] sentenceTokens = null;     // non-keyword tokens
@@ -126,5 +126,29 @@ namespace Lucene.Net.Analysis.OpenNlp
             lemmas = null;
             lemmaNum = 0;
         }
+
+        /// <summary>
+        /// Releases resources used by the <see cref="OpenNLPLemmatizerFilter"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
+
+        // LUCENENET specific
+        protected override void Dispose(bool disposing)
+        {
+            try
+            {
+                if (disposing)
+                {
+                    sentenceTokenAttrsIter?.Dispose();
+                    sentenceTokenAttrsIter = null;
+                }
+            }
+            finally
+            {
+                base.Dispose(disposing);
+            }
+        }
     }
 }
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs
index 9941561..5d4b697 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs
@@ -62,7 +62,7 @@ namespace Lucene.Net.Analysis.OpenNlp
 
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs
index 42f8230..3e7e200 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Analysis.OpenNlp
     public sealed class OpenNLPPOSFilter : TokenFilter
     {
         private readonly IList<AttributeSource> sentenceTokenAttrs = new List<AttributeSource>();
-        string[] tags = null;
+        private string[] tags = null;
         private int tokenNum = 0;
         private bool moreTokensAvailable = true;
 
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs
index 5295668..5f112c2 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs
@@ -48,7 +48,7 @@ namespace Lucene.Net.Analysis.OpenNlp
             posTaggerModelFile = Require(args, POS_TAGGER_MODEL);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs
index 99c9b07..6e12869 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.OpenNlp
         private CharacterIterator text;
         private int currentSentence;
         private int[] sentenceStarts;
-        private NLPSentenceDetectorOp sentenceOp;
+        private readonly NLPSentenceDetectorOp sentenceOp; // LUCENENET: marked readonly
 
         public OpenNLPSentenceBreakIterator(NLPSentenceDetectorOp sentenceOp)
         {
@@ -256,9 +256,8 @@ namespace Lucene.Net.Analysis.OpenNlp
         private string CharacterIteratorToString()
         {
             string fullText;
-            if (text is CharArrayIterator)
+            if (text is CharArrayIterator charArrayIterator)
             {
-                CharArrayIterator charArrayIterator = (CharArrayIterator)text;
                 fullText = new string(charArrayIterator.Text, charArrayIterator.Start, charArrayIterator.Length);
             }
             else
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs
index 68c1b84..1eed080 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Analysis.OpenNlp
         private int termNum = 0;
         private int sentenceStart = 0;
 
-        private readonly NLPSentenceDetectorOp sentenceOp = null;
+        //private readonly NLPSentenceDetectorOp sentenceOp = null; // LUCENENET: Never read
         private readonly NLPTokenizerOp tokenizerOp = null;
 
         /// <summary>
@@ -59,7 +59,7 @@ namespace Lucene.Net.Analysis.OpenNlp
             {
                 throw new ArgumentException("OpenNLPTokenizer: both a Sentence Detector and a Tokenizer are required");
             }
-            this.sentenceOp = sentenceOp;
+            //this.sentenceOp = sentenceOp; // LUCENENET: Never read
             this.tokenizerOp = tokenizerOp;
             this.termAtt = AddAttribute<ICharTermAttribute>();
             this.flagsAtt = AddAttribute<IFlagsAttribute>();
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs
index 20afc8b..22ddd9b 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Analysis.OpenNlp
             tokenizerModelFile = Require(args, TOKENIZER_MODEL);
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs b/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs
index 65a5509..f511296 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.OpenNlp.Tools
 
         public virtual string[] Lemmatize(string[] words, string[] postags)
         {
-            string[] lemmas = null;
+            string[] lemmas; // LUCENENET: IDE0059: Remove unnecessary value assignment
             string[] maxEntLemmas = null;
             if (dictionaryLemmatizer != null)
             {
diff --git a/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs
index e4b3dc5..7a4fda1 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs
@@ -171,22 +171,20 @@ namespace Lucene.Net.Analysis.OpenNlp.Tools
         {
             if (!lemmaDictionaries.TryGetValue(dictionaryFile, out string dictionary) || dictionary == null)
             {
-                using (TextReader reader = new StreamReader(loader.OpenResource(dictionaryFile), Encoding.UTF8))
+                using TextReader reader = new StreamReader(loader.OpenResource(dictionaryFile), Encoding.UTF8);
+                StringBuilder builder = new StringBuilder();
+                char[] chars = new char[8092];
+                int numRead = 0;
+                do
                 {
-                    StringBuilder builder = new StringBuilder();
-                    char[] chars = new char[8092];
-                    int numRead = 0;
-                    do
+                    numRead = reader.Read(chars, 0, chars.Length);
+                    if (numRead > 0)
                     {
-                        numRead = reader.Read(chars, 0, chars.Length);
-                        if (numRead > 0)
-                        {
-                            builder.Append(chars, 0, numRead);
-                        }
-                    } while (numRead > 0);
-                    dictionary = builder.ToString();
-                    lemmaDictionaries[dictionaryFile] = dictionary;
-                }
+                        builder.Append(chars, 0, numRead);
+                    }
+                } while (numRead > 0);
+                dictionary = builder.ToString();
+                lemmaDictionaries[dictionaryFile] = dictionary;
             }
             return dictionary;
         }
diff --git a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs
index d4331bb..e669c15 100644
--- a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs
@@ -57,9 +57,9 @@ namespace Lucene.Net.Analysis.Phonetic
             // LanguageSet: defaults to automagic, otherwise a comma-separated list.
             ISet<string> langs = GetSet(args, "languageSet");
             languageSet = (null == langs || (1 == langs.Count && langs.Contains("auto"))) ? null : LanguageSet.From(langs);
-            if (!(args.Count == 0))
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs
index 6065b01..b4e2592 100644
--- a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs
@@ -96,7 +96,7 @@ namespace Lucene.Net.Analysis.Phonetic
                     if (saveState)
                     {
                         remainingTokens.Enqueue(CaptureState());
-                        saveState = false;
+                        //saveState = false; // LUCENENET: IDE0059: Remove unnecessary value assignment
                     }
                     posAtt.PositionIncrement = firstAlternativeIncrement;
                     termAtt.SetEmpty().Append(alternatePhoneticValue);
diff --git a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs
index d70fd41..8f4d014 100644
--- a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs
@@ -53,9 +53,9 @@ namespace Lucene.Net.Analysis.Phonetic
         {
             inject = GetBoolean(args, INJECT, true);
             maxCodeLength = GetInt32(args, MAX_CODE_LENGTH, DEFAULT_MAX_CODE_LENGTH);
-            if (!(args.Count == 0))
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs b/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs
index 4510d6f..49ced5c 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs	
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs	
@@ -35,31 +35,12 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Creates an instance of the Caverphone encoder
         /// </summary>
-        public AbstractCaverphone()
+        protected AbstractCaverphone() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
             : base()
         {
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //    /**
-        //     * Encodes an Object using the caverphone algorithm. This method is provided in order to satisfy the requirements of
-        //     * the Encoder interface, and will throw an EncoderException if the supplied object is not of type java.lang.String.
-        //     *
-        //     * @param source
-        //     *            Object to encode
-        //     * @return An object (or type java.lang.String) containing the caverphone code which corresponds to the String
-        //     *         supplied.
-        //     * @throws EncoderException
-        //     *             if the parameter supplied is not of type java.lang.String
-        //     */
-        //    @Override
-        //public Object encode(final Object source) throws EncoderException
-        //    {
-        //    if (!(source instanceof String)) {
-        //            throw new EncoderException("Parameter supplied to Caverphone encode is not of type java.lang.String");
-        //        }
-        //    return this.encode((String) source);
-        //    }
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         // LUCENENET specific - must provide implementation for IStringEncoder
         public abstract string Encode(string source);
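
CA1012 concerns constructor accessibility on abstract types: a public constructor on an abstract class can never be called by consumers, only chained to by subclasses, so protected states the intent. A small sketch with a hypothetical Shape type, not taken from this commit:

    public abstract class Shape
    {
        protected Shape(string name)      // protected, not public: only subclasses chain here
        {
            Name = name;
        }

        public string Name { get; }

        public abstract double Area();
    }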
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
index 26cfe1c..201cd97 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
@@ -110,7 +110,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         }
 
         // LUCENENET specific - need to load this first for LoadLangs() to work
-        private static readonly string LANGUAGE_RULES_RN = "lang.txt";
+        private const string LANGUAGE_RULES_RN = "lang.txt";
 
         private static readonly IDictionary<NameType, Lang> langs = LoadLangs();
 
@@ -131,8 +131,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         /// <returns>A Lang encapsulating the language guessing rules for that name type.</returns>
         public static Lang GetInstance(NameType nameType)
         {
-            Lang result;
-            langs.TryGetValue(nameType, out result);
+            langs.TryGetValue(nameType, out Lang result);
             return result;
         }
 
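
IDE0018 merges the out variable declaration into the call itself, which keeps the declaration and the TryGetValue call from drifting apart. Assuming the Lang and NameType types above, a hypothetical standalone version of the lookup would look like:

    public static Lang GetInstanceSketch(IDictionary<NameType, Lang> langs, NameType nameType)
    {
        langs.TryGetValue(nameType, out Lang result);   // declared inline at the call site
        return result;                                  // null when the key is absent (reference-type default)
    }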
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs
index 2b33ebb..63d7886 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs
@@ -82,8 +82,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
         public static Languages GetInstance(NameType nameType)
         {
-            Languages result;
-            LANGUAGES.TryGetValue(nameType, out result);
+            LANGUAGES.TryGetValue(nameType, out Languages result);
             return result;
         }
 
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
index 9602d80..fedc8de 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
@@ -73,8 +73,10 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
             private PhonemeBuilder(Phoneme phoneme)
             {
-                this.phonemes = new JCG.LinkedHashSet<Phoneme>();
-                this.phonemes.Add(phoneme);
+                this.phonemes = new JCG.LinkedHashSet<Phoneme>
+                {
+                    phoneme
+                };
             }
 
             internal PhonemeBuilder(ISet<Phoneme> phonemes)
@@ -212,11 +214,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
             public RulesApplication(IDictionary<string, IList<Rule>> finalRules, string input,
                                     PhonemeBuilder phonemeBuilder, int i, int maxPhonemes)
             {
-                if (finalRules == null)
-                {
-                    throw new ArgumentNullException("The finalRules argument must not be null");
-                }
-                this.finalRules = finalRules;
+                this.finalRules = finalRules ?? throw new ArgumentNullException(nameof(finalRules), "The finalRules argument must not be null");
                 this.phonemeBuilder = phonemeBuilder;
                 this.input = input;
                 this.i = i;
@@ -237,8 +235,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
             {
                 this.found = false;
                 int patternLength = 1;
-                IList<Rule> rules;
-                if (this.finalRules.TryGetValue(input.Substring(i, patternLength), out rules) && rules != null)
+                if (this.finalRules.TryGetValue(input.Substring(i, patternLength), out IList<Rule> rules) && rules != null)
                 {
                     foreach (Rule rule in rules)
                     {
@@ -269,13 +266,14 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
         private static IDictionary<NameType, ISet<string>> LoadNamePrefixes() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
         {
-            var namePrefixes = new Dictionary<NameType, ISet<string>>();
-            namePrefixes[NameType.ASHKENAZI] = new JCG.HashSet<string>() { "bar", "ben", "da", "de", "van", "von" }.AsReadOnly();
-            namePrefixes[NameType.SEPHARDIC] = new JCG.HashSet<string>() { "al", "el", "da", "dal", "de", "del", "dela", "de la",
-                                                              "della", "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly();
-            namePrefixes[NameType.GENERIC] = new JCG.HashSet<string>() { "da", "dal", "de", "del", "dela", "de la", "della",
-                                                          "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly();
-            return namePrefixes;
+            return new Dictionary<NameType, ISet<string>>
+            {
+                [NameType.ASHKENAZI] = new JCG.HashSet<string>() { "bar", "ben", "da", "de", "van", "von" }.AsReadOnly(),
+                [NameType.SEPHARDIC] = new JCG.HashSet<string>() { "al", "el", "da", "dal", "de", "del", "dela", "de la",
+                                                              "della", "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly(),
+                [NameType.GENERIC] = new JCG.HashSet<string>() { "da", "dal", "de", "del", "dela", "de la", "della",
+                                                          "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly()
+            };
         }
 
         /// <summary>
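
Two refactorings in this file are worth spelling out. The explicit null check plus throw collapses into a null-coalescing throw expression, and LoadNamePrefixes now builds its dictionary with index initializers, which call the indexer setter, so a repeated key would overwrite rather than throw (unlike Add-based collection initializers). A minimal sketch of the first pattern, assuming System and System.Collections.Generic and a hypothetical RulesApplicationSketch wrapper:

    internal sealed class RulesApplicationSketch
    {
        private readonly IDictionary<string, IList<Rule>> finalRules;

        public RulesApplicationSketch(IDictionary<string, IList<Rule>> finalRules)
        {
            // assign-or-throw in a single expression; nameof keeps the parameter name in sync
            this.finalRules = finalRules
                ?? throw new ArgumentNullException(nameof(finalRules), "The finalRules argument must not be null");
        }
    }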
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs
index c70d404..bff6704 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs
@@ -29,9 +29,9 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
     /// </summary>
     internal class ResourceConstants
     {
-        public static readonly string CMT = "//";
+        public const string CMT = "//";
         public static readonly Encoding ENCODING = Encoding.UTF8;
-        public static readonly string EXT_CMT_END = "*/";
-        public static readonly string EXT_CMT_START = "/*";
+        public const string EXT_CMT_END = "*/";
+        public const string EXT_CMT_START = "/*";
     }
 }
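
CA1802 prefers const over static readonly when the value is a compile-time constant: a const is baked into call sites, while a static readonly field is read at run time. That is also why ENCODING stays static readonly, since Encoding.UTF8 is not a constant expression. A stripped-down sketch, assuming System.Text is imported:

    internal static class ResourceConstantsSketch
    {
        public const string CMT = "//";                            // compile-time constant, inlined at use sites
        public static readonly Encoding ENCODING = Encoding.UTF8;  // runtime value, cannot be const
    }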
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
index b93d980..23eb7ec 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
@@ -158,7 +158,9 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
             return rules;
         }
 
+#pragma warning disable IDE0051 // Remove unused private members
         private static bool Contains(ICharSequence chars, char input)
+#pragma warning restore IDE0051 // Remove unused private members
         {
             for (int i = 0; i < chars.Length; i++)
             {
@@ -180,7 +182,9 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
             }
             return false;
         }
+#pragma warning disable IDE0051 // Remove unused private members
         private static bool Contains(StringBuilder chars, char input)
+#pragma warning restore IDE0051 // Remove unused private members
         {
             for (int i = 0; i < chars.Length; i++)
             {
@@ -331,13 +335,9 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         public static IDictionary<string, IList<Rule>> GetInstanceMap(NameType nameType, RuleType rt,
                                                              string lang)
         {
-            IDictionary<RuleType, IDictionary<string, IDictionary<string, IList<Rule>>>> nameTypes;
-            IDictionary<string, IDictionary<string, IList<Rule>>> ruleTypes;
-            IDictionary<string, IList<Rule>> rules = null;
-
-            if (RULES.TryGetValue(nameType, out nameTypes) && nameTypes != null &&
-                nameTypes.TryGetValue(rt, out ruleTypes) && ruleTypes != null &&
-                ruleTypes.TryGetValue(lang, out rules) && rules != null)
+            if (RULES.TryGetValue(nameType, out var nameTypes) && nameTypes != null &&
+                nameTypes.TryGetValue(rt, out var ruleTypes) && ruleTypes != null &&
+                ruleTypes.TryGetValue(lang, out var rules) && rules != null)
             {
             }
             else
@@ -500,8 +500,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                                         Rule r = new RuleAnonymousHelper(pat, lCon, rCon, ph, cLine, location);
 
                                         string patternKey = r.pattern.Substring(0, 1 - 0);
-                                        IList<Rule> rules;
-                                        if (!lines.TryGetValue(patternKey, out rules) || rules == null)
+                                        if (!lines.TryGetValue(patternKey, out IList<Rule> rules) || rules == null)
                                         {
                                             rules = new List<Rule>();
                                             lines[patternKey] = rules;
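
The #pragma pairs above scope the IDE0051 suppression to a single member that the analyzer reports as unused but that is intentionally kept. The same shape, shown with a hypothetical helper rather than the real Contains overloads:

    #pragma warning disable IDE0051 // Remove unused private members
        private static bool IsAsciiDigit(char c) => c >= '0' && c <= '9';
    #pragma warning restore IDE0051 // Remove unused private members

Keeping the disable/restore tight around one member avoids hiding genuinely dead code elsewhere in the file.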
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
index a4bb000..88c92a4 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using System.Runtime.CompilerServices;
 using System.Text;
 using System.Text.RegularExpressions;
 
@@ -200,11 +201,13 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                 return replacementDefault;
             }
 
-            private bool IsVowel(char ch)
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
+            private static bool IsVowel(char ch) // LUCENENET: CA1822: Mark members as static
             {
                 return ch == 'a' || ch == 'e' || ch == 'i' || ch == 'o' || ch == 'u';
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public bool Matches(string context)
             {
                 return context.StartsWith(pattern, StringComparison.Ordinal);
@@ -240,6 +243,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
 
         private class DaitchMokotoffRuleComparer : IComparer<Rule>
         {
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public int Compare(Rule rule1, Rule rule2)
             {
                 return rule2.PatternLength - rule1.PatternLength;
@@ -275,7 +279,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
 
             string rawLine;
             while ((rawLine = scanner.ReadLine()) != null)
-            { 
+            {
                 currentLine++;
                 string line = rawLine;
 
@@ -352,8 +356,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
 
                                 Rule r = new Rule(pattern, replacement1, replacement2, replacement3);
                                 char patternKey = r.Pattern[0];
-                                IList<Rule> rules;
-                                if (!ruleMapping.TryGetValue(patternKey, out rules) || rules == null)
+                                if (!ruleMapping.TryGetValue(patternKey, out IList<Rule> rules) || rules == null)
                                 {
                                     rules = new List<Rule>();
                                     ruleMapping[patternKey] = rules;
@@ -437,34 +440,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             return sb.ToString();
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //**
-        // * Encodes an Object using the Daitch-Mokotoff soundex algorithm without branching.
-        // * <p>
-        // * This method is provided in order to satisfy the requirements of the Encoder interface, and will throw an
-        // * EncoderException if the supplied object is not of type java.lang.String.
-        // * </p>
-        // *
-        // * @see #soundex(String)
-        // *
-        // * @param obj
-        // *            Object to encode
-        // * @return An object (of type java.lang.String) containing the DM soundex code, which corresponds to the String
-        // *         supplied.
-        // * @throws EncoderException
-        // *             if the parameter supplied is not of type java.lang.String
-        // * @throws IllegalArgumentException
-        // *             if a character is not mapped
-        // */
-        //@Override
-        //    public Object encode(object obj) 
-        //{
-        //        if (!(obj instanceof String)) {
-        //        throw new EncoderException(
-        //                "Parameter supplied to DaitchMokotoffSoundex encode is not of type java.lang.String");
-        //    }
-        //        return encode((String) obj);
-        //}
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encodes a string using the Daitch-Mokotoff soundex algorithm without branching.
@@ -473,6 +449,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <returns>A DM Soundex code corresponding to the string supplied.</returns>
         /// <exception cref="ArgumentException">If a character is not mapped.</exception>
         /// <seealso cref="GetSoundex(string)"/>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         public virtual string Encode(string source)
         {
             if (source == null)
@@ -532,8 +509,10 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             string input = Cleanup(source);
 
             // LinkedHashSet preserves input order. In .NET we can use List for that purpose.
-            IList<Branch> currentBranches = new List<Branch>();
-            currentBranches.Add(new Branch());
+            IList<Branch> currentBranches = new List<Branch>
+            {
+                new Branch()
+            };
 
             char lastChar = '\0';
             for (int index = 0; index < input.Length; index++)
@@ -547,8 +526,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                 }
 
                 string inputContext = input.Substring(index);
-                IList<Rule> rules;
-                if (!RULES.TryGetValue(ch, out rules) || rules == null)
+                if (!RULES.TryGetValue(ch, out IList<Rule> rules) || rules == null)
                 {
                     continue;
                 }
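
The [MethodImpl(MethodImplOptions.AggressiveInlining)] attributes added throughout this file are a hint, not a guarantee: the JIT may still decline to inline, but small hot predicates like IsVowel are good candidates. The shape of the change, assuming System.Runtime.CompilerServices is imported (as the hunk above adds):

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static bool IsVowel(char ch)
        => ch == 'a' || ch == 'e' || ch == 'i' || ch == 'o' || ch == 'u';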
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs b/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs
index 73eb64f..17bd705 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs
@@ -1,6 +1,7 @@
 // commons-codec version compatibility level: 1.9
 using System;
 using System.Globalization;
+using System.Runtime.CompilerServices;
 using System.Text;
 
 namespace Lucene.Net.Analysis.Phonetic.Language
@@ -40,7 +41,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// "Vowels" to test for
         /// </summary>
-        private static readonly string VOWELS = "AEIOUY";
+        private const string VOWELS = "AEIOUY";
 
         /// <summary>
         /// Prefixes when present which are not pronounced
@@ -72,6 +73,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// </summary>
         /// <param name="value">String to encode.</param>
         /// <returns>An encoded string.</returns>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         public virtual string GetDoubleMetaphone(string value)
         {
             return GetDoubleMetaphone(value, false);
@@ -194,29 +196,14 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             return alternate ? result.Alternate : result.Primary;
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //    /**
-        //     * Encode the value using DoubleMetaphone.  It will only work if
-        //     * <code>obj</code> is a <code>String</code> (like <code>Metaphone</code>).
-        //     *
-        //     * @param obj Object to encode (should be of type String)
-        //     * @return An encoded Object (will be of type String)
-        //     * @throws EncoderException encode parameter is not of type String
-        //     */
-
-        //public virtual object Encode(object obj) 
-        //    {
-        //    if (!(obj is String)) {
-        //            throw new EncoderException("DoubleMetaphone encode parameter is not of type String");
-        //        }
-        //    return GetDoubleMetaphone((String) obj);
-        //    }
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encode the value using DoubleMetaphone.
         /// </summary>
         /// <param name="value">String to encode.</param>
         /// <returns>An encoded string.</returns>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         public virtual string Encode(string value)
         {
             return GetDoubleMetaphone(value);
@@ -229,6 +216,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <param name="value1">The left-hand side of the encoded <see cref="string.Equals(object)"/>.</param>
         /// <param name="value2">The right-hand side of the encoded <see cref="string.Equals(object)"/>.</param>
         /// <returns><c>true</c> if the encoded <see cref="string"/>s are equal; <c>false</c> otherwise.</returns>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         public virtual bool IsDoubleMetaphoneEqual(string value1, string value2)
         {
             return IsDoubleMetaphoneEqual(value1, value2, false);
@@ -242,6 +230,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <param name="value2">The right-hand side of the encoded <see cref="string.Equals(object)"/>.</param>
         /// <param name="alternate">Use the alternate value if <c>true</c>.</param>
         /// <returns><c>true</c> if the encoded <see cref="string"/>s are equal; <c>false</c> otherwise.</returns>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         public virtual bool IsDoubleMetaphoneEqual(string value1, string value2, bool alternate)
         {
             return GetDoubleMetaphone(value1, alternate).Equals(GetDoubleMetaphone(value2, alternate), StringComparison.Ordinal);
@@ -261,6 +250,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Handles 'A', 'E', 'I', 'O', 'U', and 'Y' cases.
         /// </summary>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private int HandleAEIOUY(DoubleMetaphoneResult result, int index)
         {
             if (index == 0)
@@ -382,7 +372,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Handles 'CH' cases.
         /// </summary>
-        private int HandleCH(string value, DoubleMetaphoneResult result, int index)
+        private static int HandleCH(string value, DoubleMetaphoneResult result, int index) // LUCENENET: CA1822: Mark members as static
         {
             if (index > 0 && Contains(value, index, 4, "CHAE"))
             {   // Michael
@@ -1011,7 +1001,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Complex condition 0 for 'CH'.
         /// </summary>
-        private bool ConditionCH0(string value, int index)
+        private static bool ConditionCH0(string value, int index) // LUCENENET: CA1822: Mark members as static
         {
             if (index != 0)
             {
@@ -1035,7 +1025,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Complex condition 1 for 'CH'.
         /// </summary>
-        private bool ConditionCH1(string value, int index)
+        private static bool ConditionCH1(string value, int index) // LUCENENET: CA1822: Mark members as static
         {
             return ((Contains(value, 0, 4, "VAN ", "VON ") || Contains(value, 0, 3, "SCH")) ||
                     Contains(value, index - 2, 6, "ORCHES", "ARCHIT", "ORCHID") ||
@@ -1047,7 +1037,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Complex condition 0 for 'L'.
         /// </summary>
-        private bool ConditionL0(string value, int index)
+        private static bool ConditionL0(string value, int index) // LUCENENET: CA1822: Mark members as static
         {
             if (index == value.Length - 3 &&
                 Contains(value, index - 1, 4, "ILLO", "ILLA", "ALLE"))
@@ -1085,6 +1075,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// Determines whether or not a value is of slavo-germanic origin. A value is
         /// of slavo-germanic origin if it contains any of 'W', 'K', 'CZ', or 'WITZ'.
         /// </summary>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private bool IsSlavoGermanic(string value)
         {
             return value.IndexOf('W') > -1 || value.IndexOf('K') > -1 ||
@@ -1094,7 +1085,8 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Determines whether or not a character is a vowel or not
         /// </summary>
-        private bool IsVowel(char ch)
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static bool IsVowel(char ch) // LUCENENET: CA1822: Mark members as static
         {
             return VOWELS.IndexOf(ch) != -1;
         }
@@ -1193,18 +1185,21 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                 this.alternate = new StringBuilder(maxLength);
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public virtual void Append(char value)
             {
                 AppendPrimary(value);
                 AppendAlternate(value);
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public virtual void Append(char primary, char alternate)
             {
                 AppendPrimary(primary);
                 AppendAlternate(alternate);
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public virtual void AppendPrimary(char value)
             {
                 if (this.primary.Length < this.maxLength)
@@ -1213,6 +1208,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                 }
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public virtual void AppendAlternate(char value)
             {
                 if (this.alternate.Length < this.maxLength)
@@ -1221,12 +1217,14 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                 }
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public virtual void Append(string value)
             {
                 AppendPrimary(value);
                 AppendAlternate(value);
             }
 
+            [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public virtual void Append(string primary, string alternate)
             {
                 AppendPrimary(primary);
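
CA1822 flags instance members that touch no instance state; marking them static documents that fact and avoids passing the implicit this argument. A condensed sketch mirroring the IsVowel change above, with a hypothetical method name:

    private const string VOWELS = "AEIOUY";

    private static bool IsVowelSketch(char ch)   // reads only the const, so it can be static
    {
        return VOWELS.IndexOf(ch) != -1;
    }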
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs b/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs
index 4cc3b34..ce3ab00 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs
@@ -98,26 +98,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             return upperName;
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        // **
-        // * Encodes an Object using the Match Rating Approach algorithm. Method is here to satisfy the requirements of the
-        // * Encoder interface Throws an EncoderException if input object is not of type java.lang.string.
-        // *
-        // * @param pObject
-        // *            Object to encode
-        // * @return An object (or type java.lang.string) containing the Match Rating Approach code which corresponds to the
-        // *         string supplied.
-        // * @throws EncoderException
-        // *             if the parameter supplied is not of type java.lang.string
-        // */
-        //public Object encode(Object pObject) throws EncoderException
-        //{
-        //if (!(pObject instanceof string)) {
-        //        throw new EncoderException(
-        //                "Parameter supplied to Match Rating Approach encoder is not of type java.lang.string");
-        //    }
-        //return encode((string) pObject);
-        //}
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encodes a string using the Match Rating Approach (MRA) algorithm.
@@ -178,7 +159,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <returns>The min rating value.</returns>
         internal int GetMinRating(int sumLength)
         {
-            int minRating = 0;
+            int minRating; // LUCENENET: IDE0059: Remove unnecessary value assignment
 
             if (sumLength <= FOUR)
             {
@@ -259,7 +240,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             // 5. Obtain the minimum rating value by calculating the length sum of the
             // encoded strings and sending it down.
             int sumLength = Math.Abs(name1.Length + name2.Length);
-            int minRating = 0;
+            int minRating; // LUCENENET: IDE0059: Remove unnecessary value assignment
             minRating = GetMinRating(sumLength);
 
             // 6. Process the encoded strings from left to right and remove any
@@ -277,7 +258,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// </summary>
         /// <param name="letter">The letter under investiagtion.</param>
         /// <returns><c>true</c> if a vowel, else <c>false</c>.</returns>
-        internal bool IsVowel(string letter)
+        internal static bool IsVowel(string letter) // LUCENENET: CA1822: Mark members as static
         {
             return letter.Equals("E", StringComparison.OrdinalIgnoreCase) || letter.Equals("A", StringComparison.OrdinalIgnoreCase) || letter.Equals("O", StringComparison.OrdinalIgnoreCase) ||
                    letter.Equals("I", StringComparison.OrdinalIgnoreCase) || letter.Equals("U", StringComparison.OrdinalIgnoreCase);
@@ -298,11 +279,11 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             int name1Size = name1.Length - 1;
             int name2Size = name2.Length - 1;
 
-            string name1LtRStart = EMPTY;
-            string name1LtREnd = EMPTY;
+            string name1LtRStart/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment
+            string name1LtREnd/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment
 
-            string name2RtLStart = EMPTY;
-            string name2RtLEnd = EMPTY;
+            string name2RtLStart/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment
+            string name2RtLEnd/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment
 
             for (int i = 0; i < name1Char.Length; i++)
             {
@@ -353,7 +334,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// </summary>
         /// <param name="accentedWord">The word that may have accents in it.</param>
         /// <returns>De-accented word.</returns>
-        internal string RemoveAccents(string accentedWord)
+        internal static string RemoveAccents(string accentedWord) // LUCENENET: CA1822: Mark members as static
         {
             if (accentedWord == null)
             {
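
IDE0059 flags an initial value that every code path overwrites before reading it. Dropping the "= 0" lets the compiler's definite-assignment analysis prove that every branch assigns the local. A sketch with hypothetical thresholds, not the real MRA rating table:

    static int GetMinRatingSketch(int sumLength)
    {
        int minRating;                           // no initializer needed
        if (sumLength <= 4) minRating = 5;
        else if (sumLength <= 7) minRating = 4;
        else minRating = 3;
        return minRating;                        // compiler verifies assignment on all paths
    }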
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs
index 5487368..c10b992 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs
@@ -53,17 +53,17 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <summary>
         /// Five values in the English language
         /// </summary>
-        private static readonly string VOWELS = "AEIOU";
+        private const string VOWELS = "AEIOU";
 
         /// <summary>
         /// Variable used in Metaphone algorithm
         /// </summary>
-        private static readonly string FRONTV = "EIY";
+        private const string FRONTV = "EIY";
 
         /// <summary>
         /// Variable used in Metaphone algorithm
         /// </summary>
-        private static readonly string VARSON = "CSPTG";
+        private const string VARSON = "CSPTG";
 
         /// <summary>
         /// The max code length for metaphone is 4
@@ -91,7 +91,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         /// <returns>A metaphone code corresponding to the string supplied.</returns>
         public virtual string GetMetaphone(string txt)
         {
-            bool hard = false;
+            bool hard; // LUCENENET: IDE0059: Remove unnecessary value assignment
             if (txt == null || txt.Length == 0)
             {
                 return "";
@@ -442,27 +442,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             return n + 1 == wdsz;
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //    /**
-        //     * Encodes an Object using the metaphone algorithm.  This method
-        //     * is provided in order to satisfy the requirements of the
-        //     * Encoder interface, and will throw an EncoderException if the
-        //     * supplied object is not of type java.lang.String.
-        //     *
-        //     * @param obj Object to encode
-        //     * @return An object (or type java.lang.String) containing the
-        //     *         metaphone code which corresponds to the String supplied.
-        //     * @throws EncoderException if the parameter supplied is not
-        //     *                          of type java.lang.String
-        //     */
-        //    @Override
-        //public object encode(object obj) 
-        //    {
-        //    if (!(obj is String)) {
-        //            throw new EncoderException("Parameter supplied to Metaphone encode is not of type java.lang.String");
-        //        }
-        //    return GetMetaphone((String) obj);
-        //    }
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encodes a string using the <see cref="Metaphone"/> algorithm.
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs
index d2b713e..a09a70a 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs
@@ -119,8 +119,8 @@ namespace Lucene.Net.Analysis.Phonetic.Language
         private static readonly Regex PAT_EE_IE = new Regex("(EE|IE)$", RegexOptions.Compiled);
         private static readonly Regex PAT_DT_ETC = new Regex("(DT|RT|RD|NT|ND)$", RegexOptions.Compiled);
 
-        private static readonly char SPACE = ' ';
-        private static readonly int TRUE_LENGTH = 6;
+        private const char SPACE = ' ';
+        private const int TRUE_LENGTH = 6;
 
         /// <summary>
         /// Tests if the given character is a vowel.
@@ -234,28 +234,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             this.strict = strict;
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //    /**
-        //     * Encodes an Object using the NYSIIS algorithm. This method is provided in order to satisfy the requirements of the
-        //     * Encoder interface, and will throw an <see cref="EncoderException"/> if the supplied object is not of type
-        //     * <see cref="string"/>.
-        //     *
-        //     * @param obj
-        //     *            Object to encode
-        //     * @return An object (or a <see cref="string"/>) containing the NYSIIS code which corresponds to the given String.
-        //     * @throws EncoderException
-        //     *            if the parameter supplied is not of a <see cref="string"/>
-        //     * @throws IllegalArgumentException
-        //     *            if a character is not mapped
-        //     */
-        //    @Override
-        //public object Encode(object obj) 
-        //    {
-        //    if (!(obj is String)) {
-        //            throw new EncoderException("Parameter supplied to Nysiis encode is not of type java.lang.String");
-        //        }
-        //    return this.nysiis((String) obj);
-        //    }
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encodes a string using the NYSIIS algorithm.
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs
index e0f9071..bf6383b 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs
@@ -107,28 +107,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             return SoundexUtils.Difference(this, s1, s2);
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //    /**
-        //     * Encodes an Object using the refined soundex algorithm. This method is
-        //     * provided in order to satisfy the requirements of the Encoder interface,
-        //     * and will throw an EncoderException if the supplied object is not of type
-        //     * java.lang.String.
-        //     *
-        //     * @param obj
-        //     *                  Object to encode
-        //     * @return An object (or type java.lang.String) containing the refined
-        //     *             soundex code which corresponds to the String supplied.
-        //     * @throws EncoderException
-        //     *                  if the parameter supplied is not of type java.lang.String
-        //     */
-        //    @Override
-        //public virtual object Encode(object obj) 
-        //    {
-        //    if (!(obj is String)) {
-        //            throw new EncoderException("Parameter supplied to RefinedSoundex encode is not of type java.lang.String");
-        //        }
-        //    return soundex((String) obj);
-        //    }
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encodes a string using the refined soundex algorithm.
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs
index aaea825..570c21c 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs
@@ -213,27 +213,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             return SoundexUtils.Difference(this, s1, s2);
         }
 
-        // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway.
-        //    /**
-        //     * Encodes an Object using the soundex algorithm. This method is provided in order to satisfy the requirements of
-        //     * the Encoder interface, and will throw an EncoderException if the supplied object is not of type java.lang.String.
-        //     *
-        //     * @param obj
-        //     *                  Object to encode
-        //     * @return An object (or type java.lang.String) containing the soundex code which corresponds to the String
-        //     *             supplied.
-        //     * @throws EncoderException
-        //     *                  if the parameter supplied is not of type java.lang.String
-        //     * @throws IllegalArgumentException
-        //     *                  if a character is not mapped
-        //     */
-        //public virtual Object encode(object obj) 
-        //    {
-        //    if (!(obj is string)) {
-        //            throw new EncoderException("Parameter supplied to Soundex encode is not of type java.lang.String");
-        //        }
-        //    return soundex((string) obj);
-        //    }
+        // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway.
 
         /// <summary>
         /// Encodes a string using the soundex algorithm.
diff --git a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs
index bffc0e5..a6664f3 100644
--- a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs
@@ -29,9 +29,9 @@ namespace Lucene.Net.Analysis.Phonetic
     public sealed class PhoneticFilter : TokenFilter
     {
         /// <summary>true if encoded tokens should be added as synonyms</summary>
-        private bool inject = true;
+        private readonly bool inject = true; // LUCENENET: marked readonly
         /// <summary>phonetic encoder</summary>
-        private IStringEncoder encoder = null;
+        private readonly IStringEncoder encoder = null; // LUCENENET: marked readonly
         /// <summary>captured state, non-null when <c>inject=true</c> and a token is buffered</summary>
         private State save = null;
         private readonly ICharTermAttribute termAtt;
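
Marking inject and encoder readonly records that they are only assigned during construction; the compiler then rejects any later reassignment. A reduced sketch with a hypothetical FilterSketch type:

    public sealed class FilterSketch
    {
        private readonly bool inject;
        private readonly IStringEncoder encoder;

        public FilterSketch(IStringEncoder encoder, bool inject)
        {
            this.encoder = encoder;   // assignable only here (and in a field initializer)
            this.inject = inject;
        }
    }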
diff --git a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs
index c4c8880..529e315 100644
--- a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs
@@ -69,12 +69,12 @@ namespace Lucene.Net.Analysis.Phonetic
     public class PhoneticFilterFactory : TokenFilterFactory, IResourceLoaderAware
     {
         /// <summary>parameter name: either a short name or a full class name</summary>
-        public static readonly string ENCODER = "encoder";
+        public const string ENCODER = "encoder";
         /// <summary>parameter name: true if encoded tokens should be added as synonyms</summary>
-        public static readonly string INJECT = "inject"; // boolean
+        public const string INJECT = "inject"; // boolean
                                                          /** parameter name: restricts the length of the phonetic code */
-        public static readonly string MAX_CODE_LENGTH = "maxCodeLength";
-        private static readonly string PACKAGE_CONTAINING_ENCODERS = "Lucene.Net.Analysis.Phonetic.Language.";
+        public const string MAX_CODE_LENGTH = "maxCodeLength";
+        private const string PACKAGE_CONTAINING_ENCODERS = "Lucene.Net.Analysis.Phonetic.Language.";
 
         //Effectively constants; uppercase keys
         private static readonly IDictionary<string, Type> registry = new Dictionary<string, Type> // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
@@ -108,9 +108,9 @@ namespace Lucene.Net.Analysis.Phonetic
             {
                 maxCodeLength = null;
             }
-            if (!(args.Count == 0))
+            if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs b/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs
index ae18014..a3a9456 100644
--- a/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
     /// <para/>
     /// @lucene.experimental
     /// </summary>
-    public class AnalyzerProfile
+    public static class AnalyzerProfile // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         /// <summary>
         /// Global indicating the configured analysis data directory
@@ -80,28 +80,6 @@ namespace Lucene.Net.Analysis.Cn.Smart
             string currentPath = System.AppContext.BaseDirectory;
 #endif
 
-            //FileInfo[] cadidateFiles = new FileInfo[] { new FileInfo(currentPath + "/" + dirName),
-            //    new FileInfo(currentPath + "/bin/" + dirName)/*, new FileInfo("./" + propName),
-            //    new FileInfo("./lib/" + propName)*/ };
-            //for (int i = 0; i < cadidateFiles.Length; i++)
-            //{
-            //    FileInfo file = cadidateFiles[i];
-            //    if (file.Exists)
-            //    {
-            //        ANALYSIS_DATA_DIR = file.FullName;
-
-            //        //if (file.isDirectory())
-            //        //{
-            //        //    ANALYSIS_DATA_DIR = file.getAbsolutePath();
-            //        //}
-            //        //else if (file.isFile() && GetAnalysisDataDir(file).Length != 0)
-            //        //{
-            //        //    ANALYSIS_DATA_DIR = GetAnalysisDataDir(file);
-            //        //}
-            //        break;
-            //    }
-            //}
-
             string candidatePath = System.IO.Path.Combine(currentPath, dirName);
             if (Directory.Exists(candidatePath))
             {
@@ -127,45 +105,6 @@ namespace Lucene.Net.Analysis.Cn.Smart
             {
                 // ignore security errors
             }
-
-
-            //for (int i = 0; i < cadidateDirectories.Count; i++)
-            //{
-            //    DirectoryInfo dir = cadidateDirectories[i];
-            //    if (dir.Exists)
-            //    {
-            //        ANALYSIS_DATA_DIR = dir.FullName;
-            //        break;
-            //    }
-            //}
-
-            //if (ANALYSIS_DATA_DIR.Length == 0)
-            //{
-            //    // Dictionary directory cannot be found.
-            //    throw new Exception("WARNING: Can not find lexical dictionary directory!"
-            //     + " This will cause unpredictable exceptions in your application!"
-            //     + " Please refer to the manual to download the dictionaries.");
-            //}
-
         }
-
-        //private static string GetAnalysisDataDir(FileInfo propFile)
-        //{
-        //    Properties prop = new Properties();
-        //    try
-        //    {
-        //        string dir;
-        //        using (FileStream input = new FileStream(propFile.FullName, FileMode.Open, FileAccess.Read))
-        //        {
-        //            prop.load(new StreamReader(input, Encoding.UTF8));
-        //            dir = prop.getProperty("analysis.data.dir", "");
-        //        }
-        //        return dir;
-        //    }
-        //    catch (IOException e)
-        //    {
-        //        return "";
-        //    }
-        //}
     }
 }
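
CA1052 says a type exposing only static members should itself be declared static, which removes the implicit public constructor and blocks both instantiation and inheritance. A trimmed-down sketch with a hypothetical ProfileSketch holder:

    public static class ProfileSketch
    {
        public static string ANALYSIS_DATA_DIR = "";

        public static string GetDataDir() => ANALYSIS_DATA_DIR;
    }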
diff --git a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs
index 88cc069..8485fe2 100644
--- a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs
@@ -89,7 +89,32 @@ namespace Lucene.Net.Analysis.Cn.Smart
         public override void Reset()
         {
             base.Reset();
+            tokens?.Dispose(); // LUCENENET specific: Dispose tokens before letting it go out of scope
             tokens = null;
         }
+
+        /// <summary>
+        /// Releases resources used by the <see cref="HMMChineseTokenizer"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
+
+        // LUCENENET specific
+        protected override void Dispose(bool disposing)
+        {
+            try
+            {
+                if (disposing)
+                {
+                    tokens?.Dispose(); // LUCENENET specific - dispose tokens and set to null
+                    tokens = null;
+                }
+            }
+            finally
+            {
+                base.Dispose(disposing);
+            }
+        }
     }
 }
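
The new Dispose(bool) override follows the usual CA1063 shape: managed members are released only when disposing is true, and the base call sits in a finally block so it runs even if disposing the child resource throws. For a class that owns the whole pattern itself (the tokenizer above only overrides the protected method, inheriting its public Dispose() from its base class), a generic sketch with a hypothetical ResourceOwner, assuming System and System.IO:

    public class ResourceOwner : IDisposable
    {
        private Stream stream;                    // managed resource owned by this type

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);            // no finalizer work left to do
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                stream?.Dispose();
                stream = null;
            }
        }
    }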
diff --git a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs
index 32e9885..abf93da 100644
--- a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs
index 83b4614..67e9a84 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs
@@ -36,18 +36,18 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// First Chinese Character in GB2312 (15 * 94)
         /// Characters in GB2312 are arranged in a grid of 94 * 94, 0-14 are unassigned or punctuation.
         /// </summary>
-        public static readonly int GB2312_FIRST_CHAR = 1410;
+        public const int GB2312_FIRST_CHAR = 1410;
 
         /// <summary>
         /// Last Chinese Character in GB2312 (87 * 94). 
         /// Characters in GB2312 are arranged in a grid of 94 * 94, 88-94 are unassigned.
         /// </summary>
-        public static readonly int GB2312_CHAR_NUM = 87 * 94;
+        public const int GB2312_CHAR_NUM = 87 * 94;
 
         /// <summary>
         /// Dictionary data contains 6768 Chinese characters with frequency statistics.
         /// </summary>
-        public static readonly int CHAR_NUM_IN_FILE = 6768;
+        public const int CHAR_NUM_IN_FILE = 6768;
 
         // =====================================================
         // code +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +A +B +C +D +E +F
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs
index 9861a31..fd01294 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs
@@ -32,11 +32,11 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
     /// </summary>
     internal class BiSegGraph
     {
-        private IDictionary<int, IList<SegTokenPair>> tokenPairListTable = new Dictionary<int, IList<SegTokenPair>>();
+        private readonly IDictionary<int, IList<SegTokenPair>> tokenPairListTable = new Dictionary<int, IList<SegTokenPair>>(); // LUCENENET: marked readonly
 
         private IList<SegToken> segTokenList;
 
-        private static BigramDictionary bigramDict = BigramDictionary.GetInstance();
+        private static readonly BigramDictionary bigramDict = BigramDictionary.GetInstance(); // LUCENENET: marked readonly
 
         public BiSegGraph(SegGraph segGraph)
         {
@@ -50,7 +50,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         private void GenerateBiSegGraph(SegGraph segGraph)
         {
             double smooth = 0.1;
-            int wordPairFreq = 0;
+            int wordPairFreq; // LUCENENET: IDE0059: Remove unnecessary value assignment
             int maxStart = segGraph.MaxStart;
             double oneWordFreq, weight, tinyDouble = 1.0 / Utility.MAX_FREQUENCE;
 
@@ -60,7 +60,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             segTokenList = segGraph.MakeIndex();
             // Because the beginning position of startToken is -1, therefore startToken can be obtained when key = -1
             int key = -1;
-            IList<SegToken> nextTokens = null;
+            IList<SegToken> nextTokens; // LUCENENET: IDE0059: Remove unnecessary value assignment
             while (key < maxStart)
             {
                 if (segGraph.IsStartExist(key))
@@ -140,8 +140,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <returns><see cref="T:IList{SegTokenPair}"/> of token pairs. </returns>
         public virtual IList<SegTokenPair> GetToList(int to)
         {
-            IList<SegTokenPair> result;
-            tokenPairListTable.TryGetValue(to, out result);
+            tokenPairListTable.TryGetValue(to, out IList<SegTokenPair> result);
             return result;
         }
 
@@ -154,8 +153,10 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             int to = tokenPair.To;
             if (!IsToExist(to))
             {
-                List<SegTokenPair> newlist = new List<SegTokenPair>();
-                newlist.Add(tokenPair);
+                List<SegTokenPair> newlist = new List<SegTokenPair>
+                {
+                    tokenPair
+                };
                 tokenPairListTable[to] = newlist;
             }
             else
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
index 9ca0b57..ab7bb5d 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
@@ -35,11 +35,11 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         {
         }
 
-        public static readonly char WORD_SEGMENT_CHAR = '@';
+        public const char WORD_SEGMENT_CHAR = '@';
 
         private static BigramDictionary singleInstance;
 
-        public static readonly int PRIME_BIGRAM_LENGTH = 402137;
+        public const int PRIME_BIGRAM_LENGTH = 402137;
 
         /// <summary>
         /// The word associations are stored as FNV1 hashcodes, which have a small probability of collision, but save memory.  
@@ -50,11 +50,11 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
         private int max = 0;
 
-        private int repeat = 0;
+        //private int repeat = 0; // LUCENENET: Never read
 
         // static Logger log = Logger.getLogger(BigramDictionary.class);
 
-        private static object syncLock = new object();
+        private static readonly object syncLock = new object();
 
         public static BigramDictionary GetInstance()
         {
@@ -78,21 +78,6 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
                     {
                         singleInstance.Load(dictRoot);
                     }
-
-
-                    //try
-                    //{
-                    //    singleInstance.Load();
-                    //}
-                    //catch (IOException e)
-                    //{
-                    //    string dictRoot = AnalyzerProfile.ANALYSIS_DATA_DIR;
-                    //    singleInstance.Load(dictRoot);
-                    //}
-                    //catch (TypeLoadException e)
-                    //{
-                    //    throw new Exception(e.ToString(), e);
-                    //}
                 }
                 return singleInstance;
             }
@@ -173,30 +158,21 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
         private void LoadFromInputStream(Stream serialObjectInputStream)
         {
-            //ObjectInputStream input = new ObjectInputStream(serialObjectInputStream);
-            //bigramHashTable = (long[])input.readObject();
-            //frequencyTable = (int[])input.readObject();
-            //// log.info("load bigram dict from serialization.");
-            //input.close();
-
-            using (var reader = new BinaryReader(serialObjectInputStream))
-            //using (var reader = new DataInputStream(serialObjectInputStream))
+            using var reader = new BinaryReader(serialObjectInputStream);
+            // Read bigramHashTable
+            int bhLen = reader.ReadInt32();
+            bigramHashTable = new long[bhLen];
+            for (int i = 0; i < bhLen; i++)
             {
-                // Read bigramHashTable
-                int bhLen = reader.ReadInt32();
-                bigramHashTable = new long[bhLen];
-                for (int i = 0; i < bhLen; i++)
-                {
-                    bigramHashTable[i] = reader.ReadInt64();
-                }
+                bigramHashTable[i] = reader.ReadInt64();
+            }
 
-                // Read frequencyTable
-                int fLen = reader.ReadInt32();
-                frequencyTable = new int[fLen];
-                for (int i = 0; i < fLen; i++)
-                {
-                    frequencyTable[i] = reader.ReadInt32();
-                }
+            // Read frequencyTable
+            int fLen = reader.ReadInt32();
+            frequencyTable = new int[fLen];
+            for (int i = 0; i < fLen; i++)
+            {
+                frequencyTable[i] = reader.ReadInt32();
             }
 
             // log.info("load bigram dict from serialization.");
@@ -206,36 +182,26 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         {
             try
             {
-                //ObjectOutputStream output = new ObjectOutputStream(new FileStream(
-                //    serialObj.FullName, FileMode.Create, FileAccess.Write));
-                //output.writeObject(bigramHashTable);
-                //output.writeObject(frequencyTable);
-                //output.close();
-                
-                using (Stream output = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write))
+                using Stream output = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write);
+                using BinaryWriter writer = new BinaryWriter(output);
+                int bhLen = bigramHashTable.Length;
+                writer.Write(bhLen);
+                for (int i = 0; i < bhLen; i++)
                 {
-                    using (BinaryWriter writer = new BinaryWriter(output))
-                    {
-                        int bhLen = bigramHashTable.Length;
-                        writer.Write(bhLen);
-                        for (int i = 0; i < bhLen; i++)
-                        {
-                            writer.Write(bigramHashTable[i]);
-                        }
+                    writer.Write(bigramHashTable[i]);
+                }
 
-                        int fLen = frequencyTable.Length;
-                        writer.Write(fLen);
-                        for (int i = 0; i < fLen; i++)
-                        {
-                            writer.Write(frequencyTable[i]);
-                        }
-                    }
+                int fLen = frequencyTable.Length;
+                writer.Write(fLen);
+                for (int i = 0; i < fLen; i++)
+                {
+                    writer.Write(frequencyTable[i]);
                 }
                 // log.info("serialize bigram dict.");
             }
-#pragma warning disable 168
+#pragma warning disable 168, IDE0059
             catch (Exception e)
-#pragma warning restore 168
+#pragma warning restore 168, IDE0059
             {
                 // log.warn(e.getMessage());
             }
@@ -243,10 +209,8 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
         private void Load()
         {
-            using (Stream input = this.GetType().FindAndGetManifestResourceStream("bigramdict.mem"))
-            {
-                LoadFromInputStream(input);
-            }
+            using Stream input = this.GetType().FindAndGetManifestResourceStream("bigramdict.mem");
+            LoadFromInputStream(input);
         }
 
         private void Load(string dictRoot)
@@ -296,64 +260,62 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             byte[] intBuffer = new byte[4];
             string tmpword;
             //using (RandomAccessFile dctFile = new RandomAccessFile(dctFilePath, "r"))
-            using (var dctFile = new FileStream(dctFilePath, FileMode.Open, FileAccess.Read))
-            {
+            using var dctFile = new FileStream(dctFilePath, FileMode.Open, FileAccess.Read);
 
-                // GB2312 characters 0 - 6768
-                for (i = GB2312_FIRST_CHAR; i < GB2312_FIRST_CHAR + CHAR_NUM_IN_FILE; i++)
+            // GB2312 characters 0 - 6768
+            for (i = GB2312_FIRST_CHAR; i < GB2312_FIRST_CHAR + CHAR_NUM_IN_FILE; i++)
+            {
+                string currentStr = GetCCByGB2312Id(i);
+                // if (i == 5231)
+                // System.out.println(i);
+
+                dctFile.Read(intBuffer, 0, intBuffer.Length);
+                // the dictionary was developed for C, and byte order must be converted to work with Java
+                cnt = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian).GetInt32();
+                if (cnt <= 0)
+                {
+                    continue;
+                }
+                total += cnt;
+                int j = 0;
+                while (j < cnt)
                 {
-                    string currentStr = GetCCByGB2312Id(i);
-                    // if (i == 5231)
-                    // System.out.println(i);
-
                     dctFile.Read(intBuffer, 0, intBuffer.Length);
-                    // the dictionary was developed for C, and byte order must be converted to work with Java
-                    cnt = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian).GetInt32();
-                    if (cnt <= 0)
-                    {
-                        continue;
-                    }
-                    total += cnt;
-                    int j = 0;
-                    while (j < cnt)
+                    buffer[0] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian)
+                        .GetInt32();// frequency
+                    dctFile.Read(intBuffer, 0, intBuffer.Length);
+                    buffer[1] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian)
+                        .GetInt32();// length
+                    dctFile.Read(intBuffer, 0, intBuffer.Length);
+                    // buffer[2] = ByteBuffer.wrap(intBuffer).order(
+                    // ByteOrder.LITTLE_ENDIAN).getInt();// handle
+
+                    length = buffer[1];
+                    if (length > 0)
                     {
-                        dctFile.Read(intBuffer, 0, intBuffer.Length);
-                        buffer[0] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian)
-                            .GetInt32();// frequency
-                        dctFile.Read(intBuffer, 0, intBuffer.Length);
-                        buffer[1] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian)
-                            .GetInt32();// length
-                        dctFile.Read(intBuffer, 0, intBuffer.Length);
-                        // buffer[2] = ByteBuffer.wrap(intBuffer).order(
-                        // ByteOrder.LITTLE_ENDIAN).getInt();// handle
-
-                        length = buffer[1];
-                        if (length > 0)
+                        byte[] lchBuffer = new byte[length];
+                        dctFile.Read(lchBuffer, 0, lchBuffer.Length);
+                        //tmpword = new String(lchBuffer, "GB2312");
+                        tmpword = Encoding.GetEncoding("GB2312").GetString(lchBuffer);
+                        //tmpword = Encoding.GetEncoding("hz-gb-2312").GetString(lchBuffer);
+                        if (i != 3755 + GB2312_FIRST_CHAR)
                         {
-                            byte[] lchBuffer = new byte[length];
-                            dctFile.Read(lchBuffer, 0, lchBuffer.Length);
-                            //tmpword = new String(lchBuffer, "GB2312");
-                            tmpword = Encoding.GetEncoding("GB2312").GetString(lchBuffer);
-                            //tmpword = Encoding.GetEncoding("hz-gb-2312").GetString(lchBuffer);
-                            if (i != 3755 + GB2312_FIRST_CHAR)
-                            {
-                                tmpword = currentStr + tmpword;
-                            }
-                            char[] carray = tmpword.ToCharArray();
-                            long hashId = Hash1(carray);
-                            int index = GetAvaliableIndex(hashId, carray);
-                            if (index != -1)
+                            tmpword = currentStr + tmpword;
+                        }
+                        char[] carray = tmpword.ToCharArray();
+                        long hashId = Hash1(carray);
+                        int index = GetAvaliableIndex(hashId, carray);
+                        if (index != -1)
+                        {
+                            if (bigramHashTable[index] == 0)
                             {
-                                if (bigramHashTable[index] == 0)
-                                {
-                                    bigramHashTable[index] = hashId;
-                                    // bigramStringTable[index] = tmpword;
-                                }
-                                frequencyTable[index] += buffer[0];
+                                bigramHashTable[index] = hashId;
+                                // bigramStringTable[index] = tmpword;
                             }
+                            frequencyTable[index] += buffer[0];
                         }
-                        j++;
                     }
+                    j++;
                 }
             }
             // log.info("load dictionary done! " + dctFilePath + " total:" + total);
@@ -400,13 +362,13 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
                 hash2 = PRIME_BIGRAM_LENGTH + hash2;
             int index = hash1;
             int i = 1;
-            repeat++;
+            //repeat++; // LUCENENET: Never read
             while (bigramHashTable[index] != 0 && bigramHashTable[index] != hashId
                 && i < PRIME_BIGRAM_LENGTH)
             {
                 index = (hash1 + i * hash2) % PRIME_BIGRAM_LENGTH;
                 i++;
-                repeat++;
+                //repeat++; // LUCENENET: Never read
                 if (i > max)
                     max = i;
             }
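
    Note on the lookup being touched above: bigramHashTable is an open-addressed table probed with
    double hashing modulo PRIME_BIGRAM_LENGTH; the loop shown in the hunk is that probe. A simplified,
    self-contained sketch of the scheme follows (the second hash here is a placeholder, not the exact
    function BigramDictionary uses):

        using System;

        internal static class DoubleHashProbeSketch
        {
            private const int PrimeLength = 402137; // same prime as PRIME_BIGRAM_LENGTH

            // Returns the slot holding hashId, or the first empty slot on the probe
            // sequence, or -1 if the table is exhausted (simplified from GetAvaliableIndex).
            internal static int Probe(long[] table, long hashId)
            {
                int hash1 = (int)(hashId % PrimeLength);
                int hash2 = (int)((hashId / PrimeLength) % PrimeLength); // placeholder second hash
                if (hash1 < 0) hash1 += PrimeLength;
                if (hash2 <= 0) hash2 += PrimeLength;
                if (hash2 == PrimeLength) hash2 = 1; // keep the step non-zero

                int index = hash1;
                for (int i = 1; table[index] != 0 && table[index] != hashId && i < PrimeLength; i++)
                {
                    // double-hash step; long arithmetic avoids int overflow for large i
                    index = (int)((hash1 + (long)i * hash2) % PrimeLength);
                }
                return (table[index] == 0 || table[index] == hashId) ? index : -1;
            }
        }
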
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs
index 4940dba..610ce58 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs
@@ -28,7 +28,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
     /// </summary>
     public class HHMMSegmenter
     {
-        private static WordDictionary wordDict = WordDictionary.GetInstance();
+        private static readonly WordDictionary wordDict = WordDictionary.GetInstance(); // LUCENENET: marked readonly
 
         /// <summary>
         /// Create the <see cref="SegGraph"/> for a sentence.
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             CharType[] charTypeArray = GetCharTypes(sentence);
             StringBuilder wordBuf = new StringBuilder();
             SegToken token;
-            int frequency = 0; // the number of times word appears.
+            int frequency; // the number of times word appears. // LUCENENET: IDE0059: Remove unnecessary value assignment
             bool hasFullWidth;
             WordType wordType;
             char[] charArray;
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs
index af563a0..a332311 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <summary>
         /// Map of start offsets to <see cref="T:IList{SegToken}"/> of tokens at that position
         /// </summary>
-        private IDictionary<int, IList<SegToken>> tokenListTable = new Dictionary<int, IList<SegToken>>();
+        private readonly IDictionary<int, IList<SegToken>> tokenListTable = new Dictionary<int, IList<SegToken>>(); // LUCENENET: marked readonly
 
         private int maxStart = -1;
 
@@ -45,8 +45,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         public virtual bool IsStartExist(int s)
         {
             //return tokenListTable.get(s) != null;
-            IList<SegToken> result;
-            return tokenListTable.TryGetValue(s, out result) && result != null;
+            return tokenListTable.TryGetValue(s, out IList<SegToken> result) && result != null;
         }
 
         /// <summary>
@@ -56,8 +55,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <returns><see cref="T:IList{SegToken}"/> of tokens at the specified start offset.</returns>
         public virtual IList<SegToken> GetStartList(int s)
         {
-            IList<SegToken> result;
-            tokenListTable.TryGetValue(s, out result);
+            tokenListTable.TryGetValue(s, out IList<SegToken> result);
             return result;
         }
 
@@ -103,8 +101,10 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             int s = token.StartOffset;
             if (!IsStartExist(s))
             {
-                List<SegToken> newlist = new List<SegToken>();
-                newlist.Add(token);
+                List<SegToken> newlist = new List<SegToken>
+                {
+                    token
+                };
                 tokenListTable[s] = newlist;
             }
             else
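
    The SegGraph edits above are purely syntactic: the C# 7 inline out variable (IDE0018) and the
    collection initializer compile to the same behavior as the older two-statement forms. A minimal
    stand-alone sketch of both idioms (hypothetical dictionary, not SegGraph itself):

        using System.Collections.Generic;

        internal static class TryGetValueIdiomSketch
        {
            internal static IList<string> GetStartList(IDictionary<int, IList<string>> table, int s)
            {
                // Declare the out variable inline instead of on a separate line.
                table.TryGetValue(s, out IList<string> result);
                return result; // null when the key is absent
            }

            internal static void AddFirst(IDictionary<int, IList<string>> table, int s, string token)
            {
                // Collection initializer replaces "new List<...>(); list.Add(token);"
                table[s] = new List<string> { token };
            }
        }
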
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs
index f0bdea4..85c1fec 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs
@@ -1,5 +1,6 @@
 // lucene version compatibility level: 4.8.1
 using Lucene.Net.Support;
+using System.Diagnostics.CodeAnalysis;
 
 namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 {
@@ -31,6 +32,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// Character array containing token text
         /// </summary>
         [WritableArray]
+        [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")]
         public char[] CharArray { get; set; }
 
         /// <summary>
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
index 8b4bce3..2aadb8d 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <summary>
         /// Large prime number for hash function
         /// </summary>
-        public static readonly int PRIME_INDEX_LENGTH = 12071;
+        public const int PRIME_INDEX_LENGTH = 12071;
 
         /// <summary>
         /// wordIndexTable guarantees to hash all Chinese characters in Unicode into 
@@ -68,7 +68,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
         // static Logger log = Logger.getLogger(WordDictionary.class);
 
-        private static object syncLock = new object();
+        private static readonly object syncLock = new object();
 
         /// <summary>
         /// Get the singleton dictionary instance.
@@ -96,21 +96,6 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
                     {
                         singleInstance.Load(dictRoot);
                     }
-
-
-                    //try
-                    //{
-                    //    singleInstance.Load();
-                    //}
-                    //catch (IOException e)
-                    //{
-                    //    string wordDictRoot = AnalyzerProfile.ANALYSIS_DATA_DIR;
-                    //    singleInstance.Load(wordDictRoot);
-                    //}
-                    //catch (TypeLoadException e)
-                    //{
-                    //    throw new Exception(e.ToString(), e);
-                    //}
                 }
                 return singleInstance;
             }
@@ -165,10 +150,8 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <exception cref="IOException">If there is a low-level I/O error.</exception>
         public virtual void Load()
         {
-            using (Stream input = this.GetType().FindAndGetManifestResourceStream("coredict.mem"))
-            {
-                LoadFromObjectInputStream(input);
-            }
+            using Stream input = this.GetType().FindAndGetManifestResourceStream("coredict.mem");
+            LoadFromObjectInputStream(input);
         }
 
         private bool LoadFromObj(FileInfo serialObj)
@@ -217,76 +200,65 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
         private void LoadFromObjectInputStream(Stream serialObjectInputStream)
         {
-            //ObjectInputStream input = new ObjectInputStream(serialObjectInputStream);
-            //wordIndexTable = (short[])input.ReadObject();
-            //charIndexTable = (char[])input.ReadObject();
-            //wordItem_charArrayTable = (char[][][])input.ReadObject();
-            //wordItem_frequencyTable = (int[][])input.ReadObject();
-            //// log.info("load core dict from serialization.");
-            //input.close();
-
-            using (var reader = new BinaryReader(serialObjectInputStream))
-            //using (var reader = new DataInputStream(serialObjectInputStream))
-            {
+            using var reader = new BinaryReader(serialObjectInputStream);
 
-                // Read wordIndexTable
-                int wiLen = reader.ReadInt32();
-                wordIndexTable = new short[wiLen];
-                for (int i = 0; i < wiLen; i++)
-                {
-                    wordIndexTable[i] = reader.ReadInt16();
-                }
+            // Read wordIndexTable
+            int wiLen = reader.ReadInt32();
+            wordIndexTable = new short[wiLen];
+            for (int i = 0; i < wiLen; i++)
+            {
+                wordIndexTable[i] = reader.ReadInt16();
+            }
 
-                // Read charIndexTable
-                int ciLen = reader.ReadInt32();
-                charIndexTable = new char[ciLen];
-                for (int i = 0; i < ciLen; i++)
-                {
-                    charIndexTable[i] = reader.ReadChar();
-                }
+            // Read charIndexTable
+            int ciLen = reader.ReadInt32();
+            charIndexTable = new char[ciLen];
+            for (int i = 0; i < ciLen; i++)
+            {
+                charIndexTable[i] = reader.ReadChar();
+            }
 
-                // Read wordItem_charArrayTable
-                int caDim1 = reader.ReadInt32();
-                if (caDim1 > -1)
+            // Read wordItem_charArrayTable
+            int caDim1 = reader.ReadInt32();
+            if (caDim1 > -1)
+            {
+                wordItem_charArrayTable = new char[caDim1][][];
+                for (int i = 0; i < caDim1; i++)
                 {
-                    wordItem_charArrayTable = new char[caDim1][][];
-                    for (int i = 0; i < caDim1; i++)
+                    int caDim2 = reader.ReadInt32();
+                    if (caDim2 > -1)
                     {
-                        int caDim2 = reader.ReadInt32();
-                        if (caDim2 > -1)
+                        wordItem_charArrayTable[i] = new char[caDim2][];
+                        for (int j = 0; j < caDim2; j++)
                         {
-                            wordItem_charArrayTable[i] = new char[caDim2][];
-                            for (int j = 0; j < caDim2; j++)
+                            int caDim3 = reader.ReadInt32();
+                            if (caDim3 > -1)
                             {
-                                int caDim3 = reader.ReadInt32();
-                                if (caDim3 > -1)
+                                wordItem_charArrayTable[i][j] = new char[caDim3];
+                                for (int k = 0; k < caDim3; k++)
                                 {
-                                    wordItem_charArrayTable[i][j] = new char[caDim3];
-                                    for (int k = 0; k < caDim3; k++)
-                                    {
-                                        wordItem_charArrayTable[i][j][k] = reader.ReadChar();
-                                    }
+                                    wordItem_charArrayTable[i][j][k] = reader.ReadChar();
                                 }
                             }
                         }
                     }
                 }
+            }
 
-                // Read wordItem_frequencyTable
-                int fDim1 = reader.ReadInt32();
-                if (fDim1 > -1)
+            // Read wordItem_frequencyTable
+            int fDim1 = reader.ReadInt32();
+            if (fDim1 > -1)
+            {
+                wordItem_frequencyTable = new int[fDim1][];
+                for (int i = 0; i < fDim1; i++)
                 {
-                    wordItem_frequencyTable = new int[fDim1][];
-                    for (int i = 0; i < fDim1; i++)
+                    int fDim2 = reader.ReadInt32();
+                    if (fDim2 > -1)
                     {
-                        int fDim2 = reader.ReadInt32();
-                        if (fDim2 > -1)
+                        wordItem_frequencyTable[i] = new int[fDim2];
+                        for (int j = 0; j < fDim2; j++)
                         {
-                            wordItem_frequencyTable[i] = new int[fDim2];
-                            for (int j = 0; j < fDim2; j++)
-                            {
-                                wordItem_frequencyTable[i][j] = reader.ReadInt32();
-                            }
+                            wordItem_frequencyTable[i][j] = reader.ReadInt32();
                         }
                     }
                 }
@@ -299,73 +271,60 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         {
             try
             {
-                //ObjectOutputStream output = new ObjectOutputStream(new FileStream(
-                //    serialObj.FullName, FileMode.Create, FileAccess.Write));
-                //output.writeObject(wordIndexTable);
-                //output.writeObject(charIndexTable);
-                //output.writeObject(wordItem_charArrayTable);
-                //output.writeObject(wordItem_frequencyTable);
-                //output.close();
-                //// log.info("serialize core dict.");
-
-                using (Stream stream = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write))
+                using Stream stream = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write);
+                using var writer = new BinaryWriter(stream);
+                // Write wordIndexTable
+                int wiLen = wordIndexTable.Length;
+                writer.Write(wiLen);
+                for (int i = 0; i < wiLen; i++)
                 {
-                    using (var writer = new BinaryWriter(stream))
-                    {
-                        // Write wordIndexTable
-                        int wiLen = wordIndexTable.Length;
-                        writer.Write(wiLen);
-                        for (int i = 0; i < wiLen; i++)
-                        {
-                            writer.Write(wordIndexTable[i]);
-                        }
+                    writer.Write(wordIndexTable[i]);
+                }
 
-                        // Write charIndexTable
-                        int ciLen = charIndexTable.Length;
-                        writer.Write(ciLen);
-                        for (int i = 0; i < ciLen; i++)
-                        {
-                            writer.Write(charIndexTable[i]);
-                        }
+                // Write charIndexTable
+                int ciLen = charIndexTable.Length;
+                writer.Write(ciLen);
+                for (int i = 0; i < ciLen; i++)
+                {
+                    writer.Write(charIndexTable[i]);
+                }
 
-                        // Write wordItem_charArrayTable
-                        int caDim1 = wordItem_charArrayTable == null ? -1 : wordItem_charArrayTable.Length;
-                        writer.Write(caDim1);
-                        for (int i = 0; i < caDim1; i++)
+                // Write wordItem_charArrayTable
+                int caDim1 = wordItem_charArrayTable == null ? -1 : wordItem_charArrayTable.Length;
+                writer.Write(caDim1);
+                for (int i = 0; i < caDim1; i++)
+                {
+                    int caDim2 = wordItem_charArrayTable[i] == null ? -1 : wordItem_charArrayTable[i].Length;
+                    writer.Write(caDim2);
+                    for (int j = 0; j < caDim2; j++)
+                    {
+                        int caDim3 = wordItem_charArrayTable[i][j] == null ? -1 : wordItem_charArrayTable[i][j].Length;
+                        writer.Write(caDim3);
+                        for (int k = 0; k < caDim3; k++)
                         {
-                            int caDim2 = wordItem_charArrayTable[i] == null ? -1 : wordItem_charArrayTable[i].Length;
-                            writer.Write(caDim2);
-                            for (int j = 0; j < caDim2; j++)
-                            {
-                                int caDim3 = wordItem_charArrayTable[i][j] == null ? -1 : wordItem_charArrayTable[i][j].Length;
-                                writer.Write(caDim3);
-                                for (int k = 0; k < caDim3; k++)
-                                {
-                                    writer.Write(wordItem_charArrayTable[i][j][k]);
-                                }
-                            }
+                            writer.Write(wordItem_charArrayTable[i][j][k]);
                         }
+                    }
+                }
 
-                        // Write wordItem_frequencyTable
-                        int fDim1 = wordItem_frequencyTable == null ? -1 : wordItem_frequencyTable.Length;
-                        writer.Write(fDim1);
-                        for (int i = 0; i < fDim1; i++)
-                        {
-                            int fDim2 = wordItem_frequencyTable[i] == null ? -1 : wordItem_frequencyTable[i].Length;
-                            writer.Write(fDim2);
-                            for (int j = 0; j < fDim2; j++)
-                            {
-                                writer.Write(wordItem_frequencyTable[i][j]);
-                            }
-                        }
+                // Write wordItem_frequencyTable
+                int fDim1 = wordItem_frequencyTable == null ? -1 : wordItem_frequencyTable.Length;
+                writer.Write(fDim1);
+                for (int i = 0; i < fDim1; i++)
+                {
+                    int fDim2 = wordItem_frequencyTable[i] == null ? -1 : wordItem_frequencyTable[i].Length;
+                    writer.Write(fDim2);
+                    for (int j = 0; j < fDim2; j++)
+                    {
+                        writer.Write(wordItem_frequencyTable[i][j]);
                     }
                 }
 
                 // log.info("serialize core dict.");
             }
-#pragma warning disable 168
+#pragma warning disable 168, IDE0059
             catch (Exception e)
-#pragma warning restore 168
+#pragma warning restore 168, IDE0059
             {
                 // log.warn(e.getMessage());
             }
@@ -386,7 +345,6 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             buffer = new int[3];
             byte[] intBuffer = new byte[4];
             string tmpword;
-            //using (RandomAccessFile dctFile = new RandomAccessFile(dctFilePath, "r"))
             using (var dctFile = new FileStream(dctFilePath, FileMode.Open, FileAccess.Read))
             {
 
@@ -430,11 +388,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
                         {
                             byte[] lchBuffer = new byte[length];
                             dctFile.Read(lchBuffer, 0, lchBuffer.Length);
-                            //tmpword = new String(lchBuffer, "GB2312");
                             tmpword = Encoding.GetEncoding("GB2312").GetString(lchBuffer);
-                            //tmpword = Encoding.GetEncoding("hz-gb-2312").GetString(lchBuffer);
-                            // indexTable[i].wordItems[j].word = tmpword;
-                            // wordItemTable[i][j].charArray = tmpword.toCharArray();
                             wordItem_charArrayTable[i][j] = tmpword.ToCharArray();
                         }
                         else
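
    The Load/Save rewrites above keep the same on-disk layout: every array is written as an Int32
    length followed by its elements, with -1 standing in for a null array at any nesting level. A
    hedged round-trip sketch of that convention for an int[][] (illustrative only; WordDictionary
    stores short[], char[] and char[][][] the same way):

        using System.IO;

        internal static class JaggedArrayFormatSketch
        {
            internal static void Write(BinaryWriter writer, int[][] table)
            {
                int dim1 = table == null ? -1 : table.Length;
                writer.Write(dim1);                     // -1 marks a null outer array
                for (int i = 0; i < dim1; i++)
                {
                    int dim2 = table[i] == null ? -1 : table[i].Length;
                    writer.Write(dim2);                 // -1 marks a null inner array
                    for (int j = 0; j < dim2; j++)
                        writer.Write(table[i][j]);
                }
            }

            internal static int[][] Read(BinaryReader reader)
            {
                int dim1 = reader.ReadInt32();
                if (dim1 < 0) return null;
                var table = new int[dim1][];
                for (int i = 0; i < dim1; i++)
                {
                    int dim2 = reader.ReadInt32();
                    if (dim2 < 0) continue;             // leave table[i] null
                    table[i] = new int[dim2];
                    for (int j = 0; j < dim2; j++)
                        table[i][j] = reader.ReadInt32();
                }
                return table;
            }
        }
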
diff --git a/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs b/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs
index 08b5a31..791cd1d 100644
--- a/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
         /// <summary>
         /// End of sentence punctuation: 。,!?;,!?;
         /// </summary>
-        private readonly static string PUNCTION = "。,!?;,!?;";
+        private const string PUNCTION = "。,!?;,!?;";
 
         private readonly StringBuilder buffer = new StringBuilder();
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs b/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs
index a4c6cb1..8e2e62c 100644
--- a/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs
@@ -137,7 +137,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
         /// <param name="stopWords"><see cref="CharArraySet"/> of stopwords to use.</param>
         public SmartChineseAnalyzer(LuceneVersion matchVersion, CharArraySet stopWords)
         {
-            this.stopWords = stopWords == null ? CharArraySet.EMPTY_SET : stopWords;
+            this.stopWords = stopWords ?? CharArraySet.EMPTY_SET;
             this.matchVersion = matchVersion;
         }
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs b/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs
index bd24d13..52c5470 100644
--- a/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
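
    Background on this message change (and the identical ones in the other factories): concatenating
    a Dictionary<string, string> into a string calls its ToString(), which prints the type name rather
    than the entries, so the old message never showed which parameters were unknown; formatting through
    J2N's StringFormatter enumerates the collection instead. A rough stand-alone illustration of the
    difference, using only the BCL (output formatting here is approximate, not J2N's exact rendering):

        using System;
        using System.Collections.Generic;
        using System.Linq;

        internal static class UnknownParametersMessageSketch
        {
            internal static void Main()
            {
                var args = new Dictionary<string, string> { ["luceneMatchVersion"] = "4.8", ["bogus"] = "x" };

                // Prints only the type name, e.g. "Unknown parameters: System.Collections.Generic.Dictionary`2[...]"
                Console.WriteLine("Unknown parameters: " + args);

                // Prints the actual entries, which is what the formatter-based message achieves.
                Console.WriteLine("Unknown parameters: {" +
                    string.Join(", ", args.Select(kv => kv.Key + "=" + kv.Value)) + "}");
            }
        }
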
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs b/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs
index b7a2385..771448f 100644
--- a/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/Utility.cs b/src/Lucene.Net.Analysis.SmartCn/Utility.cs
index 0615bbf..2750386 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Utility.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Utility.cs
@@ -23,7 +23,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
     /// <para/>
     /// @lucene.experimental
     /// </summary>
-    public class Utility
+    public static class Utility // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
         public static readonly char[] STRING_CHAR_ARRAY = "未##串".ToCharArray();
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs b/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs
index 6ad831d..66bd89c 100644
--- a/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs
@@ -29,9 +29,9 @@ namespace Lucene.Net.Analysis.Cn.Smart
     /// </summary>
     internal class WordSegmenter
     {
-        private HHMMSegmenter hhmmSegmenter = new HHMMSegmenter();
+        private readonly HHMMSegmenter hhmmSegmenter = new HHMMSegmenter(); // LUCENENET: marked readonly
 
-        private SegTokenFilter tokenFilter = new SegTokenFilter();
+        private readonly SegTokenFilter tokenFilter = new SegTokenFilter(); // LUCENENET: marked readonly
 
         /// <summary>
         /// Segment a sentence into words with <see cref="HHMMSegmenter"/>
diff --git a/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs b/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs
index 5af9a4e..84c97d8 100644
--- a/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Analysis.Cn.Smart
     [Obsolete("Use HMMChineseTokenizer instead.")]
     public sealed class WordTokenFilter : TokenFilter
     {
-        private WordSegmenter wordSegmenter;
+        private readonly WordSegmenter wordSegmenter; // LUCENENET: marked readonly
 
         private IEnumerator<SegToken> tokenIter;
 
@@ -109,7 +109,32 @@ namespace Lucene.Net.Analysis.Cn.Smart
         public override void Reset()
         {
             base.Reset();
+            tokenIter?.Dispose(); // LUCENENET specific
             tokenIter = null;
         }
+
+        /// <summary>
+        /// Releases resources used by the <see cref="WordTokenFilter"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
+
+        // LUCENENET specific
+        protected override void Dispose(bool disposing)
+        {
+            try
+            {
+                if (disposing)
+                {
+                    tokenIter?.Dispose(); // LUCENENET specific - dispose tokenIter and set to null
+                    tokenIter = null;
+                }
+            }
+            finally
+            {
+                base.Dispose(disposing);
+            }
+        }
     }
 }
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
index afec557..06ed084 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
@@ -66,16 +66,11 @@ namespace Egothor.Stemmer
     /// <summary>
     /// The Compile class is used to compile a stemmer table.
     /// </summary>
-    public class Compile
+    public static class Compile // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
-        static bool backward;
-        static bool multi;
-        static Trie trie;
-
-        /// <summary>
-        /// no instantiation
-        /// </summary>
-        private Compile() { }
+        private static bool backward;
+        private static bool multi;
+        private static Trie trie;
 
         /// <summary>
         /// Entry point to the Compile application.
@@ -219,12 +214,10 @@ namespace Egothor.Stemmer
                     trie.PrintInfo(Console.Out, prefix + " ");
                 }
 
-                using (DataOutputStream os = new DataOutputStream(
-                    new FileStream(stemmerTable + ".out", FileMode.OpenOrCreate, FileAccess.Write)))
-                {
-                    os.WriteUTF(args[0]);
-                    trie.Store(os);
-                }
+                using DataOutputStream os = new DataOutputStream(
+                    new FileStream(stemmerTable + ".out", FileMode.OpenOrCreate, FileAccess.Write));
+                os.WriteUTF(args[0]);
+                trie.Store(os);
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs
index 556a8e5..7130629 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs
@@ -71,15 +71,15 @@ namespace Egothor.Stemmer
     /// </summary>
     public class Diff
     {
-        int sizex = 0;
-        int sizey = 0;
-        int[][] net;
-        int[][] way;
+        private int sizex = 0;
+        private int sizey = 0;
+        private int[][] net;
+        private int[][] way;
 
-        int INSERT;
-        int DELETE;
-        int REPLACE;
-        int NOOP;
+        private readonly int INSERT; // LUCENENET: marked readonly
+        private readonly int DELETE; // LUCENENET: marked readonly
+        private readonly int REPLACE; // LUCENENET: marked readonly
+        private readonly int NOOP; // LUCENENET: marked readonly
 
         /// <summary>
         /// Constructor for the Diff object.
@@ -324,7 +324,7 @@ namespace Egothor.Stemmer
             if (deletes != @base)
             {
                 result.Append("D" + (deletes));
-                deletes = @base;
+                //deletes = @base; // LUCENENET: IDE0059: Remove unnecessary value assignment
             }
 
             return result.ToString();
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs
index 21c5a3b..874ad53 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs
@@ -68,17 +68,11 @@ namespace Egothor.Stemmer
     /// The DiffIt class is a means generate patch commands from an already prepared
     /// stemmer table.
     /// </summary>
-    public class DiffIt
+    public static class DiffIt // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
-        /// <summary>
-        /// no instantiation
-        /// </summary>
-        private DiffIt() { }
-
         internal static int Get(int i, string s)
         {
-            int result;
-            if (!int.TryParse(s.Substring(i, 1), NumberStyles.Integer, CultureInfo.InvariantCulture, out result))
+            if (!int.TryParse(s.Substring(i, 1), NumberStyles.Integer, CultureInfo.InvariantCulture, out int result))
             {
                 return 1;
             }
@@ -124,31 +118,29 @@ namespace Egothor.Stemmer
                 // System.out.println("[" + args[i] + "]");
                 Diff diff = new Diff(ins, del, rep, nop);
 
-                using (TextReader input = new StreamReader(new FileStream(stemmerTable, FileMode.Open, FileAccess.Read), Encoding.GetEncoding(charset)))
+                using TextReader input = new StreamReader(new FileStream(stemmerTable, FileMode.Open, FileAccess.Read), Encoding.GetEncoding(charset));
+                string line;
+                while ((line = input.ReadLine()) != null)
                 {
-                    string line;
-                    while ((line = input.ReadLine()) != null)
+                    try
                     {
-                        try
+                        line = line.ToLowerInvariant();
+                        StringTokenizer st = new StringTokenizer(line);
+                        st.MoveNext();
+                        string stem = st.Current;
+                        Console.WriteLine(stem + " -a");
+                        while (st.MoveNext())
                         {
-                            line = line.ToLowerInvariant();
-                            StringTokenizer st = new StringTokenizer(line);
-                            st.MoveNext();
-                            string stem = st.Current;
-                            Console.WriteLine(stem + " -a");
-                            while (st.MoveNext())
+                            string token = st.Current;
+                            if (token.Equals(stem, StringComparison.Ordinal) == false)
                             {
-                                string token = st.Current;
-                                if (token.Equals(stem, StringComparison.Ordinal) == false)
-                                {
-                                    Console.WriteLine(stem + " " + diff.Exec(token, stem));
-                                }
+                                Console.WriteLine(stem + " " + diff.Exec(token, stem));
                             }
                         }
-                        catch (InvalidOperationException /*x*/)
-                        {
-                            // no base token (stem) on a line
-                        }
+                    }
+                    catch (InvalidOperationException /*x*/)
+                    {
+                        // no base token (stem) on a line
                     }
                 }
             }
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs
index b8defb9..05a3213 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs
@@ -77,7 +77,7 @@ namespace Egothor.Stemmer
         public override Trie Optimize(Trie orig)
         {
             IList<string> cmds = orig.cmds;
-            IList<Row> rows = new List<Row>();
+            IList<Row> rows; // LUCENENET: IDE0059: Remove unnecessary value assignment
             IList<Row> orows = orig.rows;
             int[] remap = new int[orows.Count];
 
@@ -103,6 +103,7 @@ namespace Egothor.Stemmer
         /// <param name="in">the Row to test</param>
         /// <param name="remap">Description of the Parameter</param>
         /// <returns><c>true</c> if the Row should remain; otherwise, <c>false</c></returns>
+        [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This is a shipped public API")]
         public bool Eat(Row @in, int[] remap)
         {
             int sum = 0;
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs
index d14a45c..d5d2282 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs
@@ -67,7 +67,7 @@ namespace Egothor.Stemmer
     /// </summary>
     public class Lift : Reduce
     {
-        bool changeSkip;
+        private readonly bool changeSkip; // LUCENENET: marked readonly
 
         /// <summary>
         /// Constructor for the Lift object.
@@ -90,7 +90,7 @@ namespace Egothor.Stemmer
         public override Trie Optimize(Trie orig)
         {
             IList<string> cmds = orig.cmds;
-            IList<Row> rows = new List<Row>();
+            IList<Row> rows; // LUCENENET: IDE0059: Remove unnecessary value assignment
             IList<Row> orows = orig.rows;
             int[] remap = new int[orows.Count];
 
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
index c69c679..80e152e 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
@@ -72,7 +72,7 @@ namespace Egothor.Stemmer
 
         protected List<Trie> m_tries = new List<Trie>();
 
-        int BY = 1;
+        private readonly int BY = 1; // LUCENENET: marked readonly
 
         /// <summary>
         /// Constructor for the <see cref="MultiTrie"/> object.
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs
index 0e45f78..f438c3c 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs
@@ -248,7 +248,7 @@ namespace Egothor.Stemmer
             int cmd = -1;
             StrEnum e = new StrEnum(key, forward);
             char ch;
-            char aux;
+            //char aux; // LUCENENET: IDE0059: Remove unnecessary value assignment
 
             for (int i = 0; i < key.Length;)
             {
@@ -267,7 +267,7 @@ namespace Egothor.Stemmer
                 {
                     if (i < key.Length)
                     {
-                        aux = e.Next();
+                        /*aux =*/e.Next(); // LUCENENET: IDE0059: Remove unnecessary value assignment
                     }
                     else
                     {
@@ -429,9 +429,9 @@ namespace Egothor.Stemmer
         /// </summary>
         internal class StrEnum
         {
-            private string s;
+            private readonly string s; // LUCENENET: marked readonly
             private int from;
-            private int by;
+            private readonly int by; // LUCENENET: marked readonly
 
             /// <summary>
             /// Constructor for the <see cref="StrEnum"/> object
diff --git a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs
index fad0cf5..3cfe6b2 100644
--- a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Stempel
         {
             if (args.Count > 0)
             {
-                throw new ArgumentException("Unknown parameters: " + args);
+                throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args));
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs
index 546ffb2..8a6f6ae 100644
--- a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs
@@ -33,8 +33,8 @@ namespace Lucene.Net.Analysis.Stempel
     /// </summary>
     public class StempelStemmer
     {
-        private Trie stemmer = null;
-        private StringBuilder buffer = new StringBuilder();
+        private readonly Trie stemmer = null; // LUCENENET: marked readonly
+        private readonly StringBuilder buffer = new StringBuilder(); // LUCENENET: marked readonly
 
         /// <summary>
         /// Create a Stemmer using selected stemmer table
diff --git a/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs b/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs
index 6d54e00..3fb7a8f 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs
@@ -44,8 +44,8 @@ namespace Lucene.Net.Benchmarks.ByTask
     /// </remarks>
     public class Benchmark
     {
-        private PerfRunData runData;
-        private Algorithm algorithm;
+        private readonly PerfRunData runData; // LUCENENET: marked readonly
+        private readonly Algorithm algorithm; // LUCENENET: marked readonly
         private bool executed;
 
         public Benchmark(TextReader algReader)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
index 4bf4e8f..07f2a19 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
@@ -229,8 +229,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
             // properties 
             IDictionary<string, string> props = p.MetaTags;
-            string dateStr;
-            if (props.TryGetValue("date", out dateStr) && dateStr != null)
+            if (props.TryGetValue("date", out string dateStr) && dateStr != null)
             {
                 DateTime? newDate = trecSrc.ParseDate(dateStr);
                 if (newDate != null)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs
index e39ada6..3868ba3 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs
@@ -82,7 +82,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             /* this seems silly ... there must be a better way ...
                not that this is good, but can it matter? */
 
-            private Comparer c = new Comparer();
+            private readonly Comparer c = new Comparer(); // LUCENENET: marked readonly
 
             private FileInfo current;
 
@@ -151,8 +151,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
         private DateTime? ParseDate(string dateStr)
         {
-            DateTime temp;
-            if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out temp))
+            if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime temp))
             {
                 return temp;
             }
@@ -164,10 +163,17 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             return null;
         }
 
+        /// <summary>
+        /// Releases resources used by the <see cref="DirContentSource"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
         protected override void Dispose(bool disposing)
         {
             if (disposing)
             {
+                inputFiles?.Dispose(); // LUCENENET specific - dispose inputFiles
                 inputFiles = null;
             }
         }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs
index 2dc49e2..1ac16f3 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs
@@ -123,8 +123,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                     return new Field(name, "", ft);
                 }
 
-                Field f;
-                if (!fields.TryGetValue(name, out f) || f == null)
+                if (!fields.TryGetValue(name, out Field f) || f == null)
                 {
                     f = new Field(name, "", ft);
                     fields[name] = f;
@@ -249,13 +248,12 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             if (dateString != null)
             {
                 // LUCENENET: TryParseExact needs a non-nullable DateTime to work.
-                DateTime temp;
                 if (DateTime.TryParseExact(dateString, new string[] {
                     // Original format from Java
                     "dd-MMM-yyyy HH:mm:ss",
                     // Actual format from the test files...
                     "yyyyMMddHHmmss"
-                    }, CultureInfo.InvariantCulture, DateTimeStyles.None, out temp))
+                    }, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime temp))
                 {
                     date = temp;
                 }
@@ -294,7 +292,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             // Set TITLE_FIELD
             string title = docData.Title;
             Field titleField = ds.GetField(TITLE_FIELD, m_valType);
-            titleField.SetStringValue(title == null ? "" : title);
+            titleField.SetStringValue(title ?? "");
             doc.Add(titleField);
 
             string body = docData.Body;
@@ -385,7 +383,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         {
             if (disposing)
             {
-                m_source.Dispose();
+                m_source?.Dispose();
+                leftovr?.Dispose(); // LUCENENET specific
+                docState?.Dispose(); // LUCENENET specific
             }
         }
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
index a41899d..07bb721 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
@@ -329,8 +329,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         /// </summary>
         private static int GetElementType(string elem)
         {
-            int? val;
-            ELEMENTS.TryGetValue(elem, out val);
+            ELEMENTS.TryGetValue(elem, out int? val);
             return val == null ? -1 : val.Value;
         }
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs
index 69179db..8967321 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs
@@ -37,7 +37,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     public class EnwikiQueryMaker : AbstractQueryMaker, IQueryMaker
     {
         // common and a few uncommon queries from wikipedia search logs
-        private static string[] STANDARD_QUERIES = { "Images catbox gif",
+        private static readonly string[] STANDARD_QUERIES = { "Images catbox gif", // LUCENENET: marked readonly
             "Imunisasi haram", "Favicon ico", "Michael jackson", "Unknown artist",
             "Lily Thai", "Neda", "The Last Song", "Metallica", "Nicola Tesla",
             "Max B", "Skil Corporation", "\"The 100 Greatest Artists of All Time\"",
@@ -98,18 +98,16 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             {
                 try
                 {
-
                     object query = qs[i];
                     Query q = null;
-                    if (query is string)
+                    if (query is string queryString)
                     {
-                        q = qp.Parse((string)query);
+                        q = qp.Parse(queryString);
 
                     }
-                    else if (query is Query)
+                    else if (query is Query queryObj)
                     {
-                        q = (Query)query;
-
+                        q = queryObj;
                     }
                     else
                     {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs
index 219beb6..e3d2f0f 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs
@@ -186,7 +186,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         /// Construct with the header 
         /// </summary>
         /// <param name="header">header line found in the input file, or <c>null</c> if none.</param>
-        public LineParser(string[] header)
+        protected LineParser(string[] header) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
         {
             this.m_header = header;
         }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs
index 78ac924..42f4a5c 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class Int64ToEnglishQueryMaker : IQueryMaker
     {
-        long counter = long.MinValue + 10;
+        private long counter = long.MinValue + 10;
         protected QueryParser m_parser;
 
         //// TODO: we could take param to specify locale...
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs
index a98c054..e7f38f7 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         // LUCENENET specific: DateFormatInfo not used
 
         private DirectoryInfo dataDir = null;
-        private List<FileInfo> inputFiles = new List<FileInfo>();
+        private readonly List<FileInfo> inputFiles = new List<FileInfo>(); // LUCENENET: marked readonly
         private int nextFile = 0;
         private int iteration = 0;
 
@@ -48,7 +48,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             base.SetConfig(config);
             DirectoryInfo workDir = new DirectoryInfo(config.Get("work.dir", "work"));
             string d = config.Get("docs.dir", "reuters-out");
-            dataDir = new DirectoryInfo(d);
+            dataDir = new DirectoryInfo(Path.Combine(workDir.FullName, d));
             inputFiles.Clear();
             CollectFiles(dataDir, inputFiles);
             if (inputFiles.Count == 0)
@@ -61,8 +61,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
         private DateTime? ParseDate(string dateStr)
         {
-            DateTime temp;
-            if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out temp))
+            if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime temp))
             {
                 return temp;
             }
@@ -99,33 +98,31 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 name = f.GetCanonicalPath() + "_" + iteration;
             }
 
-            using (TextReader reader = new StreamReader(new FileStream(f.FullName, FileMode.Open, FileAccess.Read), Encoding.UTF8))
+            using TextReader reader = new StreamReader(new FileStream(f.FullName, FileMode.Open, FileAccess.Read), Encoding.UTF8);
+            // First line is the date, 3rd is the title, rest is body
+            string dateStr = reader.ReadLine();
+            reader.ReadLine();// skip an empty line
+            string title = reader.ReadLine();
+            reader.ReadLine();// skip an empty line
+            StringBuilder bodyBuf = new StringBuilder(1024);
+            string line = null;
+            while ((line = reader.ReadLine()) != null)
             {
-                // First line is the date, 3rd is the title, rest is body
-                string dateStr = reader.ReadLine();
-                reader.ReadLine();// skip an empty line
-                string title = reader.ReadLine();
-                reader.ReadLine();// skip an empty line
-                StringBuilder bodyBuf = new StringBuilder(1024);
-                string line = null;
-                while ((line = reader.ReadLine()) != null)
-                {
-                    bodyBuf.Append(line).Append(' ');
-                }
-                reader.Dispose();
+                bodyBuf.Append(line).Append(' ');
+            }
+            reader.Dispose();
 
 
-                AddBytes(f.Length);
+            AddBytes(f.Length);
 
-                DateTime? date = ParseDate(dateStr.Trim());
+            DateTime? date = ParseDate(dateStr.Trim());
 
-                docData.Clear();
-                docData.Name = name;
-                docData.Body = bodyBuf.ToString();
-                docData.Title = title;
-                docData.SetDate(date);
-                return docData;
-            }
+            docData.Clear();
+            docData.Name = name;
+            docData.Body = bodyBuf.ToString();
+            docData.Title = title;
+            docData.SetDate(date);
+            return docData;
         }
 
         public override void ResetInputs()
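
The rewrite above leans on C# 8 using declarations: the reader is disposed when control leaves the enclosing method, so the explicit block (and its extra level of indentation) is no longer needed. A minimal, self-contained sketch of the same idiom (the ReadAll helper and its path parameter are hypothetical, not part of this commit):

    using System.IO;
    using System.Text;

    public static class UsingDeclarationDemo
    {
        public static string ReadAll(string path)
        {
            // Disposed automatically when ReadAll returns, on normal and exceptional paths alike.
            using TextReader reader = new StreamReader(
                new FileStream(path, FileMode.Open, FileAccess.Read), Encoding.UTF8);
            return reader.ReadToEnd();
        }
    }
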
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs
index 63ba165..238979d 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class ReutersQueryMaker : AbstractQueryMaker, IQueryMaker
     {
-        private static string[] STANDARD_QUERIES = {
+        private static readonly string[] STANDARD_QUERIES = { // LUCENENET: marked readonly
             //Start with some short queries
             "Salomon", "Comex", "night trading", "Japan Sony",
             //Try some Phrase Queries
@@ -79,18 +79,16 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             {
                 try
                 {
-
                     object query = qs[i];
                     Query q = null;
-                    if (query is string)
+                    if (query is string queryString)
                     {
-                        q = qp.Parse((string)query);
+                        q = qp.Parse(queryString);
 
                     }
-                    else if (query is Query)
+                    else if (query is Query queryObj)
                     {
-                        q = (Query)query;
-
+                        q = queryObj;
                     }
                     else
                     {
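
The query handling above now uses type patterns (IDE0020/IDE0038): the 'is' test and the cast happen in a single step, and the pattern variable is scoped to the branch where the test succeeded. A standalone sketch under that assumption (Describe and its cases are illustrative only, not the benchmark API):

    public static class PatternMatchDemo
    {
        public static string Describe(object query)
        {
            // Combines the old "is" check followed by an explicit cast into one pattern match.
            if (query is string queryString)
            {
                return "string query: " + queryString;
            }
            else if (query is int boost)
            {
                return "numeric argument: " + boost;
            }
            return "unsupported: " + (query?.GetType().Name ?? "null");
        }
    }
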
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs
index 130a3ab..fa14e8b 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs
@@ -54,9 +54,11 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             qq.Add(q1);
             Query q2 = new TermQuery(new Term(DocMaker.BODY_FIELD, "simple"));
             qq.Add(q2);
-            BooleanQuery bq = new BooleanQuery();
-            bq.Add(q1, Occur.MUST);
-            bq.Add(q2, Occur.MUST);
+            BooleanQuery bq = new BooleanQuery
+            {
+                { q1, Occur.MUST },
+                { q2, Occur.MUST }
+            };
             qq.Add(bq);
             qq.Add(qp.Parse("synthetic body"));
             qq.Add(qp.Parse("\"synthetic body\""));
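
The collection-initializer form introduced above compiles because C# rewrites each { q, Occur.MUST } entry into a call to a matching public Add overload on a type that implements IEnumerable. A rough sketch of the mechanism with hypothetical types (ClauseList and ClauseOccur stand in for the real BooleanQuery API):

    using System.Collections;
    using System.Collections.Generic;

    public enum ClauseOccur { MUST, SHOULD, MUST_NOT }

    public class ClauseList : IEnumerable<(string Query, ClauseOccur Occur)>
    {
        private readonly List<(string, ClauseOccur)> clauses = new List<(string, ClauseOccur)>();

        // "{ q1, ClauseOccur.MUST }" in an initializer becomes a call to this method.
        public void Add(string query, ClauseOccur occur) => clauses.Add((query, occur));

        public IEnumerator<(string Query, ClauseOccur Occur)> GetEnumerator() => clauses.GetEnumerator();
        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
    }

    // Usage, mirroring the diff:
    // var bq = new ClauseList { { "synthetic", ClauseOccur.MUST }, { "body", ClauseOccur.MUST } };
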
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs
index 6b01faf..6fed314 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs
@@ -24,7 +24,7 @@
     {
         private int docID = 0;
 
-        internal static readonly string DOC_TEXT =
+        internal const string DOC_TEXT =
             "Well, this is just some plain text we use for creating the " +
             "test documents. It used to be a text from an online collection " +
             "devoted to first aid, but if there was there an (online) lawyers " +
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs
index c83828c..1c91e81 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class SortableSingleDocSource : SingleDocSource
     {
-        private static string[] COUNTRIES = new string[] {
+        private static readonly string[] COUNTRIES = new string[] { // LUCENENET: marked readonly
             "European Union", "United States", "Japan", "Germany", "China (PRC)",
             "United Kingdom", "France", "Italy", "Spain", "Canada", "Brazil", "Russia",
             "India", "South Korea", "Australia", "Mexico", "Netherlands", "Turkey",
@@ -79,10 +79,11 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         public override DocData GetNextDocData(DocData docData)
         {
             docData = base.GetNextDocData(docData);
-            var props = new Dictionary<string, string>();
-
-            // random int
-            props["sort_field"] = r.Next(sortRange).ToString(CultureInfo.InvariantCulture);
+            var props = new Dictionary<string, string>
+            {
+                // random int
+                ["sort_field"] = r.Next(sortRange).ToString(CultureInfo.InvariantCulture)
+            };
 
             // random string
             int len = NextInt32(2, 20);
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs
index f44512a..8953a47 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         public static readonly string SPATIAL_FIELD = "spatial";
 
         //cache spatialStrategy by round number
-        private static IDictionary<int, SpatialStrategy> spatialStrategyCache = new Dictionary<int, SpatialStrategy>();
+        private static readonly IDictionary<int, SpatialStrategy> spatialStrategyCache = new Dictionary<int, SpatialStrategy>(); // LUCENENET: marked readonly
 
         private SpatialStrategy strategy;
         private IShapeConverter shapeConverter;
@@ -55,8 +55,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         /// </summary>
         public static SpatialStrategy GetSpatialStrategy(int roundNumber)
         {
-            SpatialStrategy result;
-            if (!spatialStrategyCache.TryGetValue(roundNumber, out result) || result == null)
+            if (!spatialStrategyCache.TryGetValue(roundNumber, out SpatialStrategy result) || result == null)
             {
                 throw new InvalidOperationException("Strategy should have been init'ed by SpatialDocMaker by now");
             }
@@ -121,8 +120,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         public override void SetConfig(Config config, ContentSource source)
         {
             base.SetConfig(config, source);
-            SpatialStrategy existing;
-            if (!spatialStrategyCache.TryGetValue(config.RoundNumber, out existing) || existing == null)
+            if (!spatialStrategyCache.TryGetValue(config.RoundNumber, out SpatialStrategy existing) || existing == null)
             {
                 //new round; we need to re-initialize
                 strategy = MakeSpatialStrategy(config);
@@ -164,9 +162,8 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
             public IShape Convert(IShape shape)
             {
-                if (shape is IPoint && (radiusDegrees != 0.0 || plusMinus != 0.0))
+                if ((radiusDegrees != 0.0 || plusMinus != 0.0) && shape is IPoint point)
                 {
-                    IPoint point = (IPoint)shape;
                     double radius = radiusDegrees;
                     if (plusMinus > 0.0)
                     {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs
index d583d22..9d12160 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs
@@ -117,9 +117,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 // makeFilter() and wrap
 
                 Filter filter = m_strategy.MakeFilter(args);
-                if (filter is QueryWrapperFilter)
+                if (filter is QueryWrapperFilter queryWrapperFilter)
                 {
-                    return ((QueryWrapperFilter)filter).Query;
+                    return queryWrapperFilter.Query;
                 }
                 else
                 {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
index 2e360f5..7149191 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
@@ -89,7 +89,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         private readonly List<FileInfo> inputFiles = new List<FileInfo>();
         private int nextFile = 0;
         // Use to synchronize threads on reading from the TREC documents.
-        private object @lock = new object();
+        private readonly object @lock = new object(); // LUCENENET: marked readonly
 
         // Required for test
         internal TextReader reader;
@@ -143,7 +143,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                     if (collectMatchLine)
                     {
                         buf.Append(sep).Append(line);
-                        sep = NEW_LINE;
+                        //sep = NEW_LINE; // LUCENENET: IDE0059: Remove unnecessary value assignment - this skips out of the loop
                     }
                     return;
                 }
@@ -158,7 +158,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
         internal virtual void OpenNextFile()
         {
-            Dispose();
+            DoClose();
             //currPathType = null; 
             while (true)
             {
@@ -216,7 +216,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             return null;
         }
 
-        protected override void Dispose(bool disposing)
+        private void DoClose() // LUCENENET specific - separate disposing from closing so those tasks that "reopen" can continue
         {
             if (reader == null)
             {
@@ -225,7 +225,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
             try
             {
-                reader.Dispose();
+                reader?.Dispose();
             }
             catch (IOException e)
             {
@@ -238,6 +238,21 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             reader = null;
         }
 
+        /// <summary>
+        /// Releases resources used by the <see cref="TrecContentSource"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
+        protected override void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                DoClose();
+                trecDocBuffer?.Dispose(); // LUCENENET specific
+            }
+        }
+
         public override DocData GetNextDocData(DocData docData)
         {
             string name = null;
@@ -293,7 +308,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             lock (@lock)
             {
                 base.ResetInputs();
-                Dispose();
+                DoClose();
                 nextFile = 0;
                 iteration = 0;
             }
@@ -305,7 +320,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             // dirs
             DirectoryInfo workDir = new DirectoryInfo(config.Get("work.dir", "work"));
             string d = config.Get("docs.dir", "trec");
-            dataDir = new DirectoryInfo(d);
+            dataDir = new DirectoryInfo(Path.Combine(workDir.FullName, d));
             // files
             CollectFiles(dataDir, inputFiles);
             if (inputFiles.Count == 0)
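
Separating DoClose() from Dispose(bool) above is the usual shape for a source that must be closed and reopened between files but still disposed deterministically at the end of the run. A hedged sketch of that pattern with hypothetical names (ReopenableSource is not part of the benchmark code):

    using System;
    using System.IO;

    public abstract class ReopenableSource : IDisposable
    {
        private TextReader reader;

        protected void OpenNextFile(string path)
        {
            DoClose();                       // release the previous file only
            reader = new StreamReader(path); // the source keeps working after a "close"
        }

        private void DoClose()
        {
            reader?.Dispose();
            reader = null;
        }

        public void Dispose()
        {
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                DoClose(); // final cleanup reuses the same close logic
            }
        }
    }
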
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs
index 7f42706..03e991a 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs
@@ -58,7 +58,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
 
         /// <summary>max length of walk up from file to its ancestors when looking for a known path type.</summary>
-        private static readonly int MAX_PATH_LENGTH = 10;
+        private const int MAX_PATH_LENGTH = 10;
 
         /// <summary>
         /// Compute the path type of a file by inspecting name of file and its parents.
@@ -66,8 +66,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         public static ParsePathType PathType(FileInfo f)
         {
             int pathLength = 0;
-            ParsePathType? ppt;
-            if (pathName2Type.TryGetValue(f.Name.ToUpperInvariant(), out ppt) && ppt != null)
+            if (pathName2Type.TryGetValue(f.Name.ToUpperInvariant(), out ParsePathType? ppt) && ppt != null)
             {
                 return ppt.Value;
             }
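
Several hunks in this commit, including the one above, inline the out variable into the TryGetValue call (IDE0018). The two forms are equivalent; the inline declaration just removes the separate, wider-scoped local. Sketch with a hypothetical cache:

    using System.Collections.Generic;

    public static class InlineOutDemo
    {
        public static string Lookup(IDictionary<string, string> cache, string key)
        {
            // Before: string value; if (cache.TryGetValue(key, out value)) ...
            // After (IDE0018): declare the out variable at the call site.
            if (cache.TryGetValue(key, out string value) && value != null)
            {
                return value;
            }
            return "<missing>";
        }
    }
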
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs
index 530ccda..3d7f3c6 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs
@@ -26,15 +26,15 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class TrecFBISParser : TrecDocParser
     {
-        private static readonly string HEADER = "<HEADER>";
-        private static readonly string HEADER_END = "</HEADER>";
+        private const string HEADER = "<HEADER>";
+        private const string HEADER_END = "</HEADER>";
         private static readonly int HEADER_END_LENGTH = HEADER_END.Length;
 
-        private static readonly string DATE1 = "<DATE1>";
-        private static readonly string DATE1_END = "</DATE1>";
+        private const string DATE1 = "<DATE1>";
+        private const string DATE1_END = "</DATE1>";
 
-        private static readonly string TI = "<TI>";
-        private static readonly string TI_END = "</TI>";
+        private const string TI = "<TI>";
+        private const string TI_END = "</TI>";
 
         public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc,
             StringBuilder docBuf, ParsePathType pathType)
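
On the const vs. static readonly split above: a C# const must be a compile-time constant and is baked into the IL of every call site, so only the literal tag strings qualify (CA1802). HEADER_END_LENGTH is computed from another field via .Length, which is not a constant expression, so it has to remain static readonly and is initialized once at run time. A small hedged sketch (field names are illustrative, not the actual parser fields):

    // Compile-time constant: the literal is embedded wherever it is used.
    private const string SAMPLE_END_TAG = "</HEADER>";

    // Not a constant expression (CS0133 if declared const), so it stays
    // a static readonly field initialized when the type is loaded.
    private static readonly int SAMPLE_END_TAG_LENGTH = SAMPLE_END_TAG.Length;
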
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs
index d4a022d..b02dc4e 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs
@@ -26,17 +26,17 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class TrecFR94Parser : TrecDocParser
     {
-        private static readonly string TEXT = "<TEXT>";
+        private const string TEXT = "<TEXT>";
         private static readonly int TEXT_LENGTH = TEXT.Length;
-        private static readonly string TEXT_END = "</TEXT>";
+        private const string TEXT_END = "</TEXT>";
 
-        private static readonly string DATE = "<DATE>";
+        private const string DATE = "<DATE>";
         private static readonly string[] DATE_NOISE_PREFIXES = {
             "DATE:",
             "date:", //TODO improve date extraction for this format
             "t.c.",
         };
-        private static readonly string DATE_END = "</DATE>";
+        private const string DATE_END = "</DATE>";
 
         //TODO can we also extract title for this format?
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs
index 189f6cb..49fc7c1 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs
@@ -25,11 +25,11 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class TrecFTParser : TrecDocParser
     {
-        private static readonly string DATE = "<DATE>";
-        private static readonly string DATE_END = "</DATE>";
+        private const string DATE = "<DATE>";
+        private const string DATE_END = "</DATE>";
 
-        private static readonly string HEADLINE = "<HEADLINE>";
-        private static readonly string HEADLINE_END = "</HEADLINE>";
+        private const string HEADLINE = "<HEADLINE>";
+        private const string HEADLINE_END = "</HEADLINE>";
 
         public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc,
             StringBuilder docBuf, ParsePathType pathType)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs
index 595b566..d8af6e8 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs
@@ -27,11 +27,11 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class TrecGov2Parser : TrecDocParser
     {
-        private static readonly string DATE = "Date: ";
+        private const string DATE = "Date: ";
         private static readonly string DATE_END = TrecContentSource.NEW_LINE;
 
-        private static readonly string DOCHDR = "<DOCHDR>";
-        private static readonly string TERMINATING_DOCHDR = "</DOCHDR>";
+        private const string DOCHDR = "<DOCHDR>";
+        private const string TERMINATING_DOCHDR = "</DOCHDR>";
 
         public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc,
             StringBuilder docBuf, ParsePathType pathType)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs
index c5611e2..6cd0054 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs
@@ -25,14 +25,14 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
     /// </summary>
     public class TrecLATimesParser : TrecDocParser
     {
-        private static readonly string DATE = "<DATE>";
-        private static readonly string DATE_END = "</DATE>";
-        private static readonly string DATE_NOISE = "day,"; // anything after the ','
+        private const string DATE = "<DATE>";
+        private const string DATE_END = "</DATE>";
+        private const string DATE_NOISE = "day,"; // anything after the ','
 
-        private static readonly string SUBJECT = "<SUBJECT>";
-        private static readonly string SUBJECT_END = "</SUBJECT>";
-        private static readonly string HEADLINE = "<HEADLINE>";
-        private static readonly string HEADLINE_END = "</HEADLINE>";
+        private const string SUBJECT = "<SUBJECT>";
+        private const string SUBJECT_END = "</SUBJECT>";
+        private const string HEADLINE = "<HEADLINE>";
+        private const string HEADLINE_END = "</HEADLINE>";
 
         public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc,
             StringBuilder docBuf, ParsePathType pathType)
diff --git a/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs b/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs
index 007f74f..2101448 100644
--- a/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs
@@ -62,31 +62,35 @@ namespace Lucene.Net.Benchmarks.ByTask
     /// </remarks>
     public class PerfRunData : IDisposable
     {
-        private Points points;
+        private readonly Points points; // LUCENENET: marked readonly
 
         // objects used during performance test run
         // directory, analyzer, docMaker - created at startup.
-        // reader, writer, searcher - maintained by basic tasks. 
+        // reader, writer, searcher - maintained by basic tasks.
+#pragma warning disable CA2213 // Disposable fields should be disposed
         private Store.Directory directory;
-        private IDictionary<string, AnalyzerFactory> analyzerFactories = new Dictionary<string, AnalyzerFactory>();
+#pragma warning restore CA2213 // Disposable fields should be disposed
+        private readonly IDictionary<string, AnalyzerFactory> analyzerFactories = new Dictionary<string, AnalyzerFactory>(); // LUCENENET: marked readonly
         private Analyzer analyzer;
-        private DocMaker docMaker;
-        private ContentSource contentSource;
-        private FacetSource facetSource;
+        private readonly DocMaker docMaker; // LUCENENET: marked readonly
+        private readonly ContentSource contentSource; // LUCENENET: marked readonly
+        private readonly FacetSource facetSource; // LUCENENET: marked readonly
         private CultureInfo locale;
 
+#pragma warning disable CA2213 // Disposable fields should be disposed
         private Store.Directory taxonomyDir;
+#pragma warning restore CA2213 // Disposable fields should be disposed
         private ITaxonomyWriter taxonomyWriter;
         private TaxonomyReader taxonomyReader;
 
         // we use separate (identical) instances for each "read" task type, so each can iterate the queries separately.
-        private IDictionary<Type, IQueryMaker> readTaskQueryMaker;
-        private Type qmkrClass;
+        private readonly IDictionary<Type, IQueryMaker> readTaskQueryMaker;
+        private readonly Type qmkrClass;
 
         private DirectoryReader indexReader;
         private IndexSearcher indexSearcher;
         private IndexWriter indexWriter;
-        private Config config;
+        private readonly Config config;
         private long startTimeMillis;
 
         private readonly IDictionary<string, object> perfObjects = new Dictionary<string, object>();
@@ -146,9 +150,9 @@ namespace Lucene.Net.Benchmarks.ByTask
                 List<IDisposable> perfObjectsToClose = new List<IDisposable>();
                 foreach (object obj in perfObjects.Values)
                 {
-                    if (obj is IDisposable)
+                    if (obj is IDisposable disposable)
                     {
-                        perfObjectsToClose.Add((IDisposable)obj);
+                        perfObjectsToClose.Add(disposable);
                     }
                 }
                 IOUtils.Dispose(perfObjectsToClose);
@@ -207,8 +211,7 @@ namespace Lucene.Net.Benchmarks.ByTask
         {
             lock (this)
             {
-                object result;
-                perfObjects.TryGetValue(key, out result);
+                perfObjects.TryGetValue(key, out object result);
                 return result;
             }
         }
@@ -443,8 +446,7 @@ namespace Lucene.Net.Benchmarks.ByTask
                 // mapping the query maker by task class allows extending/adding new search/read tasks
                 // without needing to modify this class.
                 Type readTaskClass = readTask.GetType();
-                IQueryMaker qm;
-                if (!readTaskQueryMaker.TryGetValue(readTaskClass, out qm) || qm == null)
+                if (!readTaskQueryMaker.TryGetValue(readTaskClass, out IQueryMaker qm) || qm == null)
                 {
                     try
                     {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs b/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs
index 11f2a30..7f0078f 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs
@@ -25,8 +25,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Programmatic
     /// <summary>
     /// Sample performance test written programmatically - no algorithm file is needed here.
     /// </summary>
-    public class Sample
+    public static class Sample // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
     {
+        [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Sample shows optional args[] parameter")]
         public static void Main(string[] args)
         {
             var p = InitProps();
@@ -69,22 +70,23 @@ namespace Lucene.Net.Benchmarks.ByTask.Programmatic
         // Sample programmatic settings. Could also read from file.
         private static IDictionary<string, string> InitProps()
         {
-            var p = new Dictionary<string, string>();
-            p["task.max.depth.log"] = "3";
-            p["max.buffered"] = "buf:10:10:100:100:10:10:100:100";
-            //p["doc.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersContentSource, Lucene.Net.Benchmark";
-            p["log.step"] = "2000";
-            p["doc.delete.step"] = "8";
-            p["analyzer"] = "Lucene.Net.Analysis.Standard.StandardAnalyzer, Lucene.Net.Analysis.Common";
-            p["doc.term.vector"] = "false";
-            p["directory"] = "FSDirectory";
-            p["query.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersQueryMaker, Lucene.Net.Benchmark";
-            p["doc.stored"] = "true";
-            p["docs.dir"] = "reuters-out";
-            p["compound"] = "cmpnd:true:true:true:true:false:false:false:false";
-            p["doc.tokenized"] = "true";
-            p["merge.factor"] = "mrg:10:100:10:100:10:100:10:100";
-            return p;
+            return new Dictionary<string, string>
+            {
+                ["task.max.depth.log"] = "3",
+                ["max.buffered"] = "buf:10:10:100:100:10:10:100:100",
+                //p["doc.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersContentSource, Lucene.Net.Benchmark";
+                ["log.step"] = "2000",
+                ["doc.delete.step"] = "8",
+                ["analyzer"] = "Lucene.Net.Analysis.Standard.StandardAnalyzer, Lucene.Net.Analysis.Common",
+                ["doc.term.vector"] = "false",
+                ["directory"] = "FSDirectory",
+                ["query.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersQueryMaker, Lucene.Net.Benchmark",
+                ["doc.stored"] = "true",
+                ["docs.dir"] = "reuters-out",
+                ["compound"] = "cmpnd:true:true:true:true:false:false:false:false",
+                ["doc.tokenized"] = "true",
+                ["merge.factor"] = "mrg:10:100:10:100:10:100:10:100"
+            };
         }
     }
 }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs
index f08408c..b12985f 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Stats
         // stat points ordered by their start time. 
         // for now we collect points as TaskStats objects.
         // later might optimize to collect only native data.
-        private List<TaskStats> points = new List<TaskStats>();
+        private readonly List<TaskStats> points = new List<TaskStats>(); // LUCENENET: marked readonly
 
         private int nextTaskRunNum = 0;
 
@@ -38,7 +38,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Stats
         /// <summary>
         /// Create a Points statistics object.
         /// </summary>
+#pragma warning disable IDE0060 // Remove unused parameter
         public Points(Config config)
+#pragma warning restore IDE0060 // Remove unused parameter
         {
         }
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs
index 49fc096..35760bb 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs
@@ -22,10 +22,10 @@
     /// </summary>
     public class Report
     {
-        private string text;
-        private int size;
-        private int outOf;
-        private int reported;
+        private readonly string text; // LUCENENET: marked readonly
+        private readonly int size; // LUCENENET: marked readonly
+        private readonly int outOf; // LUCENENET: marked readonly
+        private readonly int reported; // LUCENENET: marked readonly
 
         public Report(string text, int size, int reported, int outOf)
         {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs
index 7c6d1a3..0010f97 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs
@@ -35,13 +35,13 @@ namespace Lucene.Net.Benchmarks.ByTask.Stats
 #endif
     {
         /// <summary>Task for which data was collected.</summary>
-        private PerfTask task;
+        private readonly PerfTask task; // LUCENENET: marked readonly
 
         /// <summary>Round in which task run started.</summary>
         private int round;
 
-        /// <summary>Task start time.</summary>
-        private long start;
+        ///// <summary>Task start time.</summary>
+        //private long start; // LUCENENET: Never read
 
         /// <summary>Task elapsed time.  elapsed >= 0 indicates run completion!</summary>
         private long elapsed = -1;
@@ -53,7 +53,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Stats
         private long maxUsedMem;
 
         /// <summary>Serial run number of this task run in the perf run.</summary>
-        private int taskRunNum;
+        private readonly int taskRunNum; // LUCENENET: marked readonly
 
         /// <summary>Number of other tasks that started to run while this task was still running.</summary>
         private int numParallelTasks;
@@ -83,7 +83,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Stats
             this.round = round;
             maxTotMem = GC.GetTotalMemory(false); //Runtime.getRuntime().totalMemory();
             maxUsedMem = maxTotMem; // - Runtime.getRuntime().freeMemory(); // LUCENENET TODO: available RAM
-            start = Stopwatch.GetTimestamp();
+            //start = Stopwatch.GetTimestamp(); // LUCENENET: Never read
         }
 
         /// <summary>
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs
index f4e2556..7236100 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs
@@ -94,8 +94,33 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
 
         public override void TearDown()
         {
-            inputDir.Dispose();
+            inputDir?.Dispose();
+            inputDir = null; // LUCENENET specific
             base.TearDown();
         }
+
+        /// <summary>
+        /// Releases resources used by the <see cref="AddIndexesTask"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
+
+        // LUCENENET specific
+        protected override void Dispose(bool disposing)
+        {
+            try
+            {
+                if (disposing)
+                {
+                    inputDir?.Dispose(); // LUCENENET specific - dispose tokens and set to null
+                    inputDir = null;
+                }
+            }
+            finally
+            {
+                base.Dispose(disposing);
+            }
+        }
     }
 }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
index cd730b6..50c0236 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
@@ -72,16 +72,16 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
     /// </remarks>
     public class AnalyzerFactoryTask : PerfTask
     {
-        private static readonly string LUCENE_ANALYSIS_PACKAGE_PREFIX = "Lucene.Net.Analysis.";
+        private const string LUCENE_ANALYSIS_PACKAGE_PREFIX = "Lucene.Net.Analysis.";
         private static readonly Regex ANALYSIS_COMPONENT_SUFFIX_PATTERN
             = new Regex("(?s:(?:(?:Token|Char)?Filter|Tokenizer)(?:Factory)?)$", RegexOptions.Compiled);
         private static readonly Regex TRAILING_DOT_ZERO_PATTERN = new Regex(@"\.0$", RegexOptions.Compiled);
 
         private enum ArgType { ANALYZER_ARG, ANALYZER_ARG_OR_CHARFILTER_OR_TOKENIZER, TOKENFILTER }
 
-        string factoryName = null;
-        int? positionIncrementGap = null;
-        int? offsetGap = null;
+        private string factoryName = null;
+        private int? positionIncrementGap = null;
+        private int? offsetGap = null;
         private readonly IList<CharFilterFactory> charFilterFactories = new List<CharFilterFactory>();
         private TokenizerFactory tokenizerFactory = null;
         private readonly IList<TokenFilterFactory> tokenFilterFactories = new List<TokenFilterFactory>();
@@ -481,10 +481,10 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                 {
                     throw new Exception("Line #" + GetLineNumber(stok) + ": ", e);
                 }
-                if (instance is IResourceLoaderAware)
+                if (instance is IResourceLoaderAware resourceLoaderAware)
                 {
                     DirectoryInfo baseDir = new DirectoryInfo(RunData.Config.Get("work.dir", "work"));
-                    ((IResourceLoaderAware)instance).Inform(new FilesystemResourceLoader(baseDir));
+                    resourceLoaderAware.Inform(new FilesystemResourceLoader(baseDir));
                 }
                 if (typeof(CharFilterFactory).IsAssignableFrom(clazz))
                 {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs
index 2047494..a85683d 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         {
         }
 
-        bool doWait = true;
+        private bool doWait = true;
 
         public override int DoLogic()
         {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs
index 63859cd..9c7d080 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
     /// </summary>
     public class CommitIndexTask : PerfTask
     {
-        IDictionary<string, string> commitUserData;
+        private IDictionary<string, string> commitUserData;
 
         public CommitIndexTask(PerfRunData runData)
             : base(runData)
@@ -37,8 +37,10 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         public override void SetParams(string @params)
         {
             base.SetParams(@params);
-            commitUserData = new Dictionary<string, string>();
-            commitUserData[OpenReaderTask.USER_DATA] = @params;
+            commitUserData = new Dictionary<string, string>
+            {
+                [OpenReaderTask.USER_DATA] = @params
+            };
         }
 
         public override int DoLogic()
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs
index d35687b..50c39c8 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs
@@ -44,5 +44,28 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             dd.Value = source.GetNextDocData(dd.Value);
             return 1;
         }
+
+        /// <summary>
+        /// Releases resources used by the <see cref="ConsumeContentSourceTask"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
+
+        // LUCENENET specific
+        protected override void Dispose(bool disposing)
+        {
+            try
+            {
+                if (disposing)
+                {
+                    dd.Dispose(); // LUCENENET specific - dispose dd
+                }
+            }
+            finally
+            {
+                base.Dispose(disposing);
+            }
+        }
     }
 }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs
index 470fbb3..53a68a5 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs
@@ -191,9 +191,8 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                     throw new Exception("unable to instantiate class '" + mergePolicy + "' as merge policy", e);
                 }
                 iwConf.MergePolicy.NoCFSRatio = isCompound ? 1.0 : 0.0;
-                if (iwConf.MergePolicy is LogMergePolicy)
+                if (iwConf.MergePolicy is LogMergePolicy logMergePolicy)
                 {
-                    LogMergePolicy logMergePolicy = (LogMergePolicy)iwConf.MergePolicy;
                     logMergePolicy.MergeFactor = config.Get("merge.factor", OpenIndexTask.DEFAULT_MERGE_PFACTOR);
                 }
             }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs
index 8226e11..61c49d1 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         {
         }
 
-        int maxNumSegments = -1;
+        private int maxNumSegments = -1;
 
         public override int DoLogic()
         {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs
index 28e95b3..d2a5dc8 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs
@@ -73,8 +73,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                     analyzerName = typeof(Lucene.Net.Analysis.Standard.StandardAnalyzer).AssemblyQualifiedName;
                 }
                 // First, lookup analyzerName as a named analyzer factory
-                AnalyzerFactory factory;
-                if (RunData.AnalyzerFactories.TryGetValue(analyzerName, out factory) && null != factory)
+                if (RunData.AnalyzerFactories.TryGetValue(analyzerName, out AnalyzerFactory factory) && null != factory)
                 {
                     analyzer = factory.Create();
                 }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs
index 34f4830..1762b37 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs
@@ -79,7 +79,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             base.SetParams(@params);
             //language = country = variant = "";
             culture = "";
-            string ignore;
+            string _;
             StringTokenizer st = new StringTokenizer(@params, ",");
             if (st.MoveNext())
                 //language = st.nextToken();
@@ -87,7 +87,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             if (st.MoveNext())
                 culture += "-" + st.Current;
             if (st.MoveNext())
-                ignore = st.Current;
+                _ = st.Current;
         }
 
         public override bool SupportsParams => true;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs
index 28350cd..a7967e7 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         public override int DoLogic()
         {
             Store.Directory dir = RunData.Directory;
-            DirectoryReader r = null;
+            DirectoryReader r; // LUCENENET: IDE0059: Remove unnecessary value assignment
             if (commitUserData != null)
             {
                 r = DirectoryReader.Open(OpenReaderTask.FindIndexCommit(dir, commitUserData));
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs
index feb4fce..1621480 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs
@@ -63,16 +63,16 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         , System.ICloneable
 #endif
     {
-        internal static readonly int DEFAULT_LOG_STEP = 1000;
+        internal const int DEFAULT_LOG_STEP = 1000;
 
-        private PerfRunData runData;
+        private readonly PerfRunData runData;
 
         // properties that all tasks have
         private string name;
         private int depth = 0;
         protected int m_logStep;
         private int logStepCount = 0;
-        private int maxDepthLogStart = 0;
+        private readonly int maxDepthLogStart = 0; // LUCENENET: marked readonly
         private bool disableCounting = false;
         protected string m_params = null;
 
@@ -121,7 +121,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             stopNow = true;
         }
 
-        public PerfTask(PerfRunData runData)
+        protected PerfTask(PerfRunData runData)
             : this()
         {
             this.runData = runData;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs
index bd4f5f8..0e2f37f 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         public override int DoLogic()
         {
             Directory dir = RunData.Directory;
-            IndexReader r = null;
+            IndexReader r; // LUCENENET: IDE0059: Remove unnecessary value assignment
             if (userData == null)
                 r = DirectoryReader.Open(dir);
             else
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs
index 43ea456..02ea9af 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs
@@ -48,7 +48,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
     {
         private readonly IQueryMaker queryMaker;
 
-        public ReadTask(PerfRunData runData)
+        protected ReadTask(PerfRunData runData) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
             : base(runData)
         {
             if (WithSearch)
@@ -89,7 +89,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             // optionally warm and add num docs traversed to count
             if (WithWarm)
             {
-                Document doc = null;
+                Document doc; // LUCENENET: IDE0059: Remove unnecessary value assignment
                 IBits liveDocs = MultiFields.GetLiveDocs(reader);
                 for (int m = 0; m < reader.MaxDoc; m++)
                 {
@@ -260,7 +260,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         /// </remarks>
         public virtual int TraversalSize => int.MaxValue;
 
-        internal static readonly int DEFAULT_SEARCH_NUM_HITS = 10;
+        internal const int DEFAULT_SEARCH_NUM_HITS = 10;
         private int numHits;
 
         public override void Setup()
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs
index d2b6058..03665dd 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs
@@ -76,19 +76,17 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                     continue;
                 }
 
-                using (TokenStream stream = field.GetTokenStream(analyzer))
-                {
-                    // reset the TokenStream to the first token
-                    stream.Reset();
+                using TokenStream stream = field.GetTokenStream(analyzer);
+                // reset the TokenStream to the first token
+                stream.Reset();
 
-                    ITermToBytesRefAttribute termAtt = stream.GetAttribute<ITermToBytesRefAttribute>();
-                    while (stream.IncrementToken())
-                    {
-                        termAtt.FillBytesRef();
-                        tokenCount++;
-                    }
-                    stream.End();
+                ITermToBytesRefAttribute termAtt = stream.GetAttribute<ITermToBytesRefAttribute>();
+                while (stream.IncrementToken())
+                {
+                    termAtt.FillBytesRef();
+                    tokenCount++;
                 }
+                stream.End();
             }
             totalTokenCount += tokenCount;
             return tokenCount;
@@ -103,9 +101,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
 
         internal sealed class ReusableStringReader : TextReader
         {
-            int upto;
-            int left;
-            string s;
+            private int upto;
+            private int left;
+            private string s;
             internal void Init(string s)
             {
                 this.s = s;
@@ -156,5 +154,29 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
 
             protected override void Dispose(bool disposing) { }
         }
+
+        /// <summary>
+        /// Releases resources used by the <see cref="ReadTokensTask"/> and
+        /// if overridden in a derived class, optionally releases unmanaged resources.
+        /// </summary>
+        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
+        /// <c>false</c> to release only unmanaged resources.</param>
... 50885 lines suppressed ...