Posted to commits@lucenenet.apache.org by ni...@apache.org on 2020/01/23 17:19:32 UTC

[lucenenet] 02/02: Upgraded (most) collections to utilize more suitable replacements from J2N (LUCENENET-616)

This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit 0eaf76540b8de326d1aa9ca24f4b5d6425a9ae38
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Thu Jan 23 21:00:56 2020 +0700

    Upgraded (most) collections to utilize more suitable replacements from J2N (LUCENENET-616)
    
    Squashed commit of the following:
    
    commit 0090923917ce9a3e1a0a557030de511c01bd15e0
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 20:24:26 2020 +0700
    
        Lucene.Net.Tests.Support.TestApiConsistency: Updated ignore list
    
    commit d8400740e9d3914a1044d2a4db775d7f80d619e7
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 13:16:04 2020 +0700
    
        Fixed some broken XML documentation links
    
    commit 1c5fc49856bcaaca0b60d34177564ac5e070aa6f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 12:43:16 2020 +0700
    
        Upgraded J2N to 2.0.0-beta-0001, ICU4N to 60.1.0-alpha.197, and Morfologik.Stemming to 2.1.6-beta-0002
    
    commit 15ce25ecf8a4d1f3605233ea06a6d3c16a9cbd77
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 12:43:32 2020 +0700
    
        Deleted NuGet.config
    
    commit a47b2b5daae0c4de10ce9c118556801986ac309f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 09:12:43 2020 +0700
    
        Lucene.Net.Index.IndexReader: Replaced ConcurrentSet with ConcurrentHashSet on parentReaders
    
    commit b720cf9ef567167243606b7f421bf174d7694d92
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 08:39:41 2020 +0700
    
        Lucene.Net.Search.ReferenceManager: Replaced ConcurrentSet with ConcurrentHashSet on refreshListeners
    
    commit 16b9e6e007268b181aa17d4a9585456091f6c31f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 08:27:12 2020 +0700
    
        Lucene.Net.Util.VirtualMethod: Replaced ConcurrentSet with ConcurrentHashSet on singletonSet
    
    commit d4492ea98c6021f976759f2414a55be92045f071
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 08:24:01 2020 +0700
    
        Lucene.Net.TestFramework.Index.ThreadedIndexingAndSearchingTestCase: Changed ConcurrentSet to ConcurrentHashSet. Also fixed a bug where the ConcurrentQueue was never populated because it was not passed through to the thread responsible for populating it.
    
    commit 8a9c6a6973cdcadc168226446707b08e60f2fbf1
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 08:04:14 2020 +0700
    
        Lucene.Net.TestFramework.Store.MockDirectoryWrapper: Changed openLocks to use ConcurrentHashSet instead of ConcurrentSet
    
    commit 88f5052e2a6fb17dc864052c5791b2f5956be80f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 07:47:45 2020 +0700
    
        Lucene.Net.Support.ConcurrentHashSet: Implemented ISet<T> (partially)
    
    commit 5d351d7192e608084ed59b685927e63c14da121c
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 07:30:06 2020 +0700
    
        Lucene.Net.Support: Added ConcurrentHashSet
    
    commit 6197416735bca9552bbb956adf0d3b8fcb434455
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 06:36:18 2020 +0700
    
        Lucene.Net.Support.Collections: Removed unused (and only partially implemented) NewSetFromMap method and related SetFromMap class
    
    commit 5e65d5e262403d9a0a77da692770471d7f61c558
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 06:34:17 2020 +0700
    
        Lucene.Net.Support.Collections: Factored out Singleton() method and used collection initializers instead
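
        A minimal sketch of what that refactoring looks like (not code from this
        commit; the Collections.Singleton() shape shown is a reconstruction, and
        JCG is the conventional alias for J2N.Collections.Generic):

            using System.Collections.Generic;
            using JCG = J2N.Collections.Generic;

            static class SingletonSketch
            {
                // Before (approximate shape of the removed helper):
                //   ISet<string> stop = Collections.Singleton("the");
                // After: a collection initializer builds the same single-element set directly.
                internal static readonly ISet<string> Stop = new JCG.HashSet<string> { "the" };
            }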
    
    commit aef40f1e50145ec5973cfbe0474dbbf99f486089
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 06:08:56 2020 +0700
    
        Lucene.Net.Support.Collections: Factored out ImplementsGenericInterface() in favor of J2N's implementation
    
    commit 6c13b7a87fcfd5091636ee8e6e2f809ba513551f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 05:54:54 2020 +0700
    
        Upgraded Morfologik.Stemming to 2.1.6-beta-0004-gf258be02f5
    
    commit 59d56680fe1f6655bbf85cd395c49796a7a8b866
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 05:54:13 2020 +0700
    
        NuGet.config: Added Morfologik.Stemming preview feed
    
    commit 48b0431e7bd231215eca360af8f6af6a0dabd782
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 04:08:58 2020 +0700
    
        SWEEP: Upgraded to account for breaking changes (AsCharSequence() and BitOperation) in J2N.
    
    commit 32d89bbd31432572463b5eb4b509711825e3f45b
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 04:01:07 2020 +0700
    
        Upgraded J2N to 1.0.0-ci-0032-g1028fef3c2 and ICU4N to 60.1.0-alpha.203+513cf990e6
    
    commit 1c28e683f787d21191bc08e37f3535d4ebeb7489
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 23 03:43:21 2020 +0700
    
        Lucene.Net.Support: Renamed ExceptionToNullableEnumConvention > ExceptionToNullableEnumConventionAttribute
    
    commit 8ecaf7642a07fa7f9bce10e00830e2a93da533e4
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Wed Jan 22 13:30:53 2020 +0700
    
        BREAKING: Changed semantics of CharTermAttribute's Append() overloads to act more like the .NET StringBuilder class.
    
        1) The 3rd parameter was changed from an exclusive end index to a count.
        2) A null parameter on a single-parameter overload will be a no-op instead of appending the text "null".
        3) A null parameter on a 3-parameter overload will be a no-op if both startIndex and count are 0; otherwise it will throw an ArgumentNullException. (See the illustrative sketch below.)
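
        For illustration only (not code from this commit), a minimal sketch of the
        new count-based contract; SetEmpty() and the exact overload shapes are
        assumptions based on the existing ICharTermAttribute API:

            using Lucene.Net.Analysis.TokenAttributes;

            static class AppendSemanticsSketch
            {
                // termAtt would normally come from AddAttribute<ICharTermAttribute>() on a TokenStream.
                internal static void Demo(ICharTermAttribute termAtt)
                {
                    string s = "foobar";
                    termAtt.SetEmpty();
                    termAtt.Append(s, 2, 3);            // was Append(s, 2, 5) with an exclusive end index; both yield "oba"
                    termAtt.Append((string)null);       // no-op now (previously appended the text "null")
                    termAtt.Append((string)null, 0, 0); // no-op when both startIndex and count are 0
                    // termAtt.Append((string)null, 0, 1); // throws ArgumentNullException
                }
            }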
    
    commit 250ccb3fae32c4fee4891db37d13bf710cd1b89b
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Wed Jan 22 10:01:40 2020 +0700
    
        SWEEP: Fixed using statements and whitespace in ~100 files
    
    commit b733c4d158e5e1de62c6b12797c589ac70b4c1c7
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Wed Jan 22 00:09:19 2020 +0700
    
        BREAKING: Factored out Character, ICharSequence, StringBuilderCharSequenceWrapper, StringBuilderExtensions, StringCharSequenceWrapper, and most StringExtensions methods in favor of J2N's implementation
    
    commit 236ebeaeb2b187c15b83fbf9707df883927be799
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Tue Jan 21 17:39:38 2020 +0700
    
        Lucene.Net.Search.FieldComparer: Replaced CompareTo() calls with JCG.Comparer<T>.Default.Compare(), factoring out Lucene.Net.Support.SignedZeroComparer in the process.
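
        Illustrative sketch of why the default BCL comparison was not enough here
        (JCG is the conventional alias for J2N.Collections.Generic; the -1 result is
        the expected Java-style ordering that SignedZeroComparer previously provided):

            using System;
            using JCG = J2N.Collections.Generic;

            static class SignedZeroSketch
            {
                static void Main()
                {
                    double negZero = -0.0d, posZero = 0.0d;

                    // BCL comparison treats -0.0 and +0.0 as equal:
                    Console.WriteLine(negZero.CompareTo(posZero));                             // 0

                    // Java-style comparison orders -0.0 before +0.0:
                    Console.WriteLine(JCG.Comparer<double>.Default.Compare(negZero, posZero)); // -1 (expected)
                }
            }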
    
    commit 0dd5d40c2584b5a124d27e892a994ead20495a58
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Tue Jan 21 04:41:51 2020 +0700
    
        Lucene.Net.Support: Factored out GeneralKeyedCollection and AttributeItem in favor of J2N.Collections.Generic.LinkedDictionary
    
    commit 9d65cc71f7fef8b4712b0181d92dd3ecea0f7ebf
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Tue Jan 21 04:01:57 2020 +0700
    
        Lucene.Net.Support: Refactored ConcurrentHashSet into ConcurrentSet - a wrapper class that can be used to synchronize any set object (ordered or not), similar to how it was done in Java. Changed ordered concurrent set types back to the original type from Lucene.
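
        The wrapper pattern described above, sketched for illustration (this is not
        the actual Lucene.Net.Support.ConcurrentSet source, just the general shape of
        a synchronized wrapper akin to Java's Collections.synchronizedSet()):

            using System.Collections;
            using System.Collections.Generic;

            // Delegates every call to any ISet<T> (ordered or not) under a single lock.
            internal sealed class SynchronizedSetSketch<T> : ISet<T>
            {
                private readonly ISet<T> set;
                private readonly object syncRoot = new object();

                public SynchronizedSetSketch(ISet<T> set) => this.set = set;

                public int Count { get { lock (syncRoot) return set.Count; } }
                public bool IsReadOnly => set.IsReadOnly;

                public bool Add(T item) { lock (syncRoot) return set.Add(item); }
                void ICollection<T>.Add(T item) { lock (syncRoot) set.Add(item); }
                public bool Remove(T item) { lock (syncRoot) return set.Remove(item); }
                public bool Contains(T item) { lock (syncRoot) return set.Contains(item); }
                public void Clear() { lock (syncRoot) set.Clear(); }
                public void CopyTo(T[] array, int arrayIndex) { lock (syncRoot) set.CopyTo(array, arrayIndex); }

                public void UnionWith(IEnumerable<T> other) { lock (syncRoot) set.UnionWith(other); }
                public void IntersectWith(IEnumerable<T> other) { lock (syncRoot) set.IntersectWith(other); }
                public void ExceptWith(IEnumerable<T> other) { lock (syncRoot) set.ExceptWith(other); }
                public void SymmetricExceptWith(IEnumerable<T> other) { lock (syncRoot) set.SymmetricExceptWith(other); }
                public bool IsSubsetOf(IEnumerable<T> other) { lock (syncRoot) return set.IsSubsetOf(other); }
                public bool IsSupersetOf(IEnumerable<T> other) { lock (syncRoot) return set.IsSupersetOf(other); }
                public bool IsProperSubsetOf(IEnumerable<T> other) { lock (syncRoot) return set.IsProperSubsetOf(other); }
                public bool IsProperSupersetOf(IEnumerable<T> other) { lock (syncRoot) return set.IsProperSupersetOf(other); }
                public bool Overlaps(IEnumerable<T> other) { lock (syncRoot) return set.Overlaps(other); }
                public bool SetEquals(IEnumerable<T> other) { lock (syncRoot) return set.SetEquals(other); }

                // Enumerates over a snapshot so callers need not hold the lock while iterating.
                public IEnumerator<T> GetEnumerator() { lock (syncRoot) return new List<T>(set).GetEnumerator(); }
                IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
            }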
    
    commit 20ce72dd26d6d60a5c507c44242812a7dcf4b1b4
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Mon Jan 20 15:15:21 2020 +0700
    
        Lucene.Net.TestFramework.Search.AssertingScorer: Changed to use ConditionalWeakTable/WeakDictionary
    
    commit 7a68b17a4d154b0d42130c0c12ba42d2b7fbd53b
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Mon Jan 20 05:40:17 2020 +0700
    
        Lucene.Net.Support.PriorityQueue: Factored out in favor of J2N's implementation
    
    commit 78f96bee8a3c2bf207f4445df85ed3111af9b797
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Mon Jan 20 05:15:21 2020 +0700
    
        Lucene.Net.Support: Factored out TreeSet and TreeDictionary and deleted all related files
    
    commit 9c39edfb1b60538e1797d49d6e8f24d52e77409e
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Mon Jan 20 04:28:15 2020 +0700
    
        Lucene.Net.Grouping.AbstractGroupFacetCollector + Lucene.Net.Highlighter.PostingsHighlight.PostingsHighlighter + Lucene.Net.TestFramework.Analysis.MockCharFilter: Factored out TreeSet and TreeDictionary from Lucene.Net.Support in favor of J2N.Collections.Generic.SortedSet and J2N.Collections.Generic.SortedDictionary
    
    commit 902c5a0953e4a10eb7f4e96138b09372387f9575
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Mon Jan 20 03:26:44 2020 +0700
    
        Upgraded to J2N 1.0.0-ci-0028-gdf6487f0d2 and ICU4N 60.1.0-alpha.200.g70c0d4d663
    
    commit 107960688cee080b4efaa3eb4d1b96f664f62582
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sat Jan 18 03:52:28 2020 +0700
    
        Lucene.Net.TestFramework.Support.JavaCompatibility.LuceneTestCase: Replaced SetEquals(expected, actual) call with J2N.Collections.Generic.SetEqualityComparer<T>.Aggressive.Equals(expected, actual)
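
        Illustrative use of the comparer named above (a sketch; the true result shown
        is the expected behavior of an order-independent, structural set comparison):

            using System;
            using System.Collections.Generic;
            using JCG = J2N.Collections.Generic;

            static class SetEqualitySketch
            {
                static void Main()
                {
                    ISet<int> expected = new SortedSet<int> { 3, 1, 2 };
                    ISet<int> actual = new HashSet<int> { 1, 2, 3 };

                    // Replaces the old SetEquals(expected, actual) helper:
                    Console.WriteLine(JCG.SetEqualityComparer<int>.Aggressive.Equals(expected, actual)); // true (expected)
                }
            }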
    
    commit 3b2ab0bb7248d35cbd8aac65033ef4368a1b8887
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sat Jan 18 01:36:26 2020 +0700
    
        Lucene.Net.Support: Factored out IdentityComparer, IdentityHashMap, and IdentityHashSet and used J2N.Runtime.CompilerServices.IdentityEqualityComparer in conjunction with standard Dictionary and HashSet types
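
        A sketch of the replacement pattern (the Default property on the comparer is
        an assumption; the namespace is the one named in the commit message):

            using System;
            using System.Collections.Generic;
            using J2N.Runtime.CompilerServices;

            static class IdentitySetSketch
            {
                static void Main()
                {
                    // Reference-identity semantics (IdentityHashSet-style) from a plain HashSet:
                    var set = new HashSet<string>(IdentityEqualityComparer<string>.Default);

                    string a = "key";
                    string b = new string("key".ToCharArray()); // equal value, different instance

                    set.Add(a);
                    Console.WriteLine(set.Contains(a)); // true  - same reference
                    Console.WriteLine(set.Contains(b)); // false - value-equal but not the same object
                }
            }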
    
    commit 5709d717ecbf58a557892c7e6fe164766eaff42b
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 17 23:07:03 2020 +0700
    
        Updated J2N to 60.1.0-alpha.198.g994ea3e2aa and ICU4N to 60.1.0-alpha.199.ga45db812af
    
    commit d9e7c033ec0748dbb716c9f6f065061c5d1370e8
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 17 06:29:33 2020 +0700
    
        Lucene.Net.Support.Collections: Factored out Swap and Shuffle in favor of J2N's implementation
    
    commit a8d8f477d97d35eba99e40c5d64502ee8db1ebe4
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 17 06:07:48 2020 +0700
    
        Lucene.Net.Support.Collections: Factored out unmodifiable methods and related classes in favor of J2N's AsReadOnly() extension methods
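
        A sketch of the intended usage (the extensions namespace shown is an
        assumption; the AsReadOnly() call itself is the one named above):

            using System;
            using System.Collections.Generic;
            using J2N.Collections.Generic.Extensions; // assumed location of the AsReadOnly() extensions

            static class ReadOnlySketch
            {
                static void Main()
                {
                    IDictionary<string, int> counts = new Dictionary<string, int> { ["a"] = 1 };

                    // Replaces the old Collections.Unmodifiable*-style helpers:
                    IDictionary<string, int> readOnly = counts.AsReadOnly();

                    try { readOnly["b"] = 2; }
                    catch (NotSupportedException) { Console.WriteLine("read-only, as expected"); }
                }
            }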
    
    commit aed4d994fff776c1111abda33ee8fe6b6802edfd
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 17 05:48:19 2020 +0700
    
        Lucene.Net.Support: Removed Equatable, EquatableList, EquatableSet, and ToString/GetHashCode methods from Collections
    
    commit 5582ddbe20a05dc67f0f6ecf1bd330eca753bd44
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 16 19:44:16 2020 +0700
    
        Lucene.Net.Util.Fst: Use J2N.Collections.List<T> for the closing type of Outputs<T> to ensure the outputs can be compared for structural equality
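
        Why this matters, as a sketch (JCG aliases J2N.Collections.Generic; the true
        result assumes that list type implements Java-style structural equality, which
        is the property this change relies on):

            using System;
            using System.Collections.Generic;
            using JCG = J2N.Collections.Generic;

            static class StructuralListSketch
            {
                static void Main()
                {
                    var bcl1 = new List<long> { 1, 2, 3 };
                    var bcl2 = new List<long> { 1, 2, 3 };
                    Console.WriteLine(bcl1.Equals(bcl2)); // false - reference equality only

                    var j2n1 = new JCG.List<long> { 1, 2, 3 };
                    var j2n2 = new JCG.List<long> { 1, 2, 3 };
                    Console.WriteLine(j2n1.Equals(j2n2)); // true (expected) - compared element by element
                }
            }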
    
    commit eb790619636ff208f8d21efac845fa4539a06d01
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 16 18:36:39 2020 +0700
    
        Lucene.Net.Util.Fst.ListOfOutputs::Merge(): Streamlined so we don't have so many casts
    
    commit 74d9fc227940baec451bc3b02b3d06621100105c
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Thu Jan 16 11:55:08 2020 +0700
    
        Updated J2N and ICU4N to latest preview versions
    
    commit 89bc2118b2dc1c3c79a344c099fb78b2daac946c
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sun Jan 12 10:27:56 2020 +0700
    
        Removed FEATURE_HASHSET_CAPACITY, since J2N now has the full .NET Core 3.x implementation with a capacity constructor
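
        Sketch of the constructor that made the feature flag unnecessary (JCG aliases
        J2N.Collections.Generic):

            using JCG = J2N.Collections.Generic;

            static class CapacitySketch
            {
                static void Main()
                {
                    // Pre-sizing avoids rehashing while the set fills - the constructor the
                    // FEATURE_HASHSET_CAPACITY flag previously gated.
                    var terms = new JCG.HashSet<string>(1024);
                    for (int i = 0; i < 1024; i++) terms.Add("term" + i);
                }
            }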
    
    commit 958cdf3c82f862664407ccef02ca2ca8fd10bf2a
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sun Jan 12 10:14:27 2020 +0700
    
        SWEEP: Removed dependencies on Collections.Equals() and Collections.GetHashCode()
    
    commit 841d0d4f0e6e5ec62d8c8c4b53a7135c668def2a
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sun Jan 12 09:42:17 2020 +0700
    
        SWEEP: Lucene.Net.Support: Removed dependencies on Equatable, EquatableList, and EquatableSet, and replaced with collections from J2N
    
    commit 6e3c3c533f299995deacc1dce7362fc20ca7a03f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sun Jan 12 09:03:32 2020 +0700
    
        Upgraded to J2N 1.0.0-ci-0012-ga65c37c253
    
    commit d80e9dda73de4f9c6b1a01902f211bf17dcf1536
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sat Jan 11 02:33:28 2020 +0700
    
        SWEEP: Swapped in structural equality comparers from J2N in place of Collections.ToString() and Collections.GetHashCode() from Lucene.Net.Support
    
    commit 1af3d46b5791cbfc0246baad54cdd2112d084fc4
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Sat Jan 11 02:28:31 2020 +0700
    
        Added naming exclusion for _Scratch class (which only exists to make a J2N namespace real for now)
    
    commit de3e0a10d7ef7274e33f3fb69303b8bef67b29d7
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 22:38:07 2020 +0700
    
        SWEEP: Factored out Arrays.AsList, which was causing both additional operational complexity and unnecessary memory allocations
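
        The C# reason behind this sweep, sketched (Arrays.AsList here refers to the
        removed Lucene.Net.Support helper):

            using System;
            using System.Collections.Generic;

            static class ArraysAsListSketch
            {
                static void Main()
                {
                    string[] fields = { "title", "body" };

                    // Before (Java-style): IList<string> view = Arrays.AsList(fields); // extra wrapper allocation
                    // After: a T[] already implements IList<T>/IEnumerable<T>, so it can be passed directly.
                    IList<string> view = fields;
                    foreach (string f in view) Console.WriteLine(f);
                }
            }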
    
    commit e1ead061df6ab5371979040ae8071b1bf8b18070
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 18:37:03 2020 +0700
    
        Lucene.Net.Util.Automaton.State: Implemented IEquatable<State>, changed enumerator to a struct
    
    commit 6c16c48bf000f2f075aa680f89b4e15fdace70e6
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 18:22:53 2020 +0700
    
        Lucene.Net.Support.ConcurrentHashSet: Implemented IStructuralEquatable and IFormattable
    
    commit ea81d7952f1b90030f7c78948a5c82e5b073bf46
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 18:16:46 2020 +0700
    
        SWEEP: Swapped out System.Collections.Generic.HashSet for J2N.Collections.Generic.HashSet
    
    commit 5d455451aef3c0144997b19fe952db92da81eb61
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 13:33:03 2020 +0700
    
        Formatting and documentation updates
    
    commit f3b4160f7d823fdf1cbf7810317197070b6678aa
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 07:00:57 2020 +0700
    
        SWEEP: Factored out C5's TreeSet, where possible.
    
    commit e8103d1c5eeab5ebeca80404e0887796d392a44c
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 06:40:59 2020 +0700
    
        SWEEP: Changed System.Collections.Generic.SortedSet to J2N.Collections.Generic.SortedSet
    
    commit a5325d0d9421e479bfda426cc3a519ce25df3e54
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 06:15:50 2020 +0700
    
        Lucene.Net.Analysis.Kuromoji.Dict.UserDictionary: Swapped out C5 TreeDictionary for J2N.Collections.Generic.SortedDictionary
    
    commit 309d4ebd2ae109e45792090e61a00833e5b4ee4f
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 06:06:40 2020 +0700
    
        SWEEP: Factored out System.Collections.Generic.SortedDictionary in favor of J2N.Collections.Generic.SortedDictionary
    
    commit fd03a03a6b878f04007ce398616ef045cf899f8a
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 05:30:08 2020 +0700
    
        SWEEP: Factored out Lucene.Net.Support.HashMap in favor of J2N.Collections.Generic.Dictionary
    
    commit c06f34e6001e83d057bc7f3f25e5bc4bf04d9c0a
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 04:31:58 2020 +0700
    
        Lucene.Net.Highlighter, Lucene.Net.Tests.Spatial: Swapped in LinkedHashSet from J2N, like in the original Lucene implementation
    
    commit d6a10e95c42a1c542920caabd609fde71a417ad8
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 04:30:15 2020 +0700
    
        Lucene.Net.Tests.Search.TestFieldCache::Test(): Simplified expression with LINQ query
    
    commit 8c11b73f6b61eb45e7e5ece1d016af87b9b97c8d
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 03:49:42 2020 +0700
    
        SWEEP: Factored out LinkedHashMap in favor of J2N's LinkedDictionary
    
    commit 659f94f0b8b16c961a3ff18e4d1bd53f7aa65865
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 03:47:25 2020 +0700
    
        Upgraded J2N to 1.0.0-ci-0001-g4f977e960e
    
    commit fd2c3249ee66a2fadd62f243dfbb9c59751f08c5
    Author: Shad Storhaug <sh...@shadstorhaug.com>
    Date:   Fri Jan 10 03:46:47 2020 +0700
    
        Added NuGet.config to import J2N preview feed on MyGet
---
 .rat-excludes                                      |    3 +-
 Directory.Build.targets                            |    2 -
 build/Dependencies.props                           |    6 +-
 .../Analysis/Ar/ArabicLetterTokenizer.cs           |   34 +-
 .../Analysis/Br/BrazilianStemFilter.cs             |   34 +-
 .../Analysis/Ca/CatalanAnalyzer.cs                 |    3 +-
 .../Analysis/CharFilter/HTMLStripCharFilter.cs     |   42 +-
 .../CharFilter/MappingCharFilterFactory.cs         |   30 +-
 .../Analysis/CharFilter/NormalizeCharMap.cs        |    3 +-
 .../Analysis/Cjk/CJKBigramFilter.cs                |   34 +-
 .../Analysis/Cjk/CJKTokenizer.cs                   |   34 +-
 .../Analysis/Ckb/SoraniNormalizer.cs               |   31 +-
 .../Analysis/Cn/ChineseFilter.cs                   |   33 +-
 .../Compound/CompoundWordTokenFilterBase.cs        |    6 +-
 .../Compound/Hyphenation/HyphenationTree.cs        |    6 +-
 .../Analysis/Core/LetterTokenizer.cs               |    4 +-
 .../Analysis/Core/LowerCaseTokenizer.cs            |    2 +-
 .../Analysis/Core/StopAnalyzer.cs                  |    9 +-
 .../Analysis/Core/TypeTokenFilterFactory.cs        |   22 +-
 .../Analysis/Core/WhitespaceTokenizer.cs           |    5 +-
 .../Analysis/De/GermanAnalyzer.cs                  |    6 +-
 .../Analysis/El/GreekLowerCaseFilter.cs            |   34 +-
 .../Analysis/El/GreekStemmer.cs                    |  115 +-
 .../Analysis/En/KStemmer.cs                        |   54 +-
 .../Analysis/En/PorterStemmer.cs                   |   80 +-
 .../Analysis/Fr/FrenchAnalyzer.cs                  |   33 +-
 .../Analysis/Ga/IrishAnalyzer.cs                   |   35 +-
 .../Analysis/Ga/IrishLowerCaseFilter.cs            |   34 +-
 .../Analysis/Hunspell/Dictionary.cs                |   61 +-
 .../Analysis/Hunspell/HunspellStemFilterFactory.cs |    4 +-
 .../Analysis/Hunspell/ISO8859_14Decoder.cs         |   30 +-
 .../Analysis/In/IndicTokenizer.cs                  |    4 +-
 .../Analysis/It/ItalianAnalyzer.cs                 |   35 +-
 .../Analysis/Miscellaneous/CodepointCountFilter.cs |    4 +-
 .../Analysis/Miscellaneous/PatternAnalyzer.cs      |   10 +-
 .../Miscellaneous/PerFieldAnalyzerWrapper.cs       |   12 +-
 .../Miscellaneous/StemmerOverrideFilter.cs         |   34 +-
 .../Miscellaneous/WordDelimiterFilterFactory.cs    |   33 +-
 .../Analysis/NGram/Lucene43EdgeNGramTokenizer.cs   |    2 +-
 .../Analysis/NGram/Lucene43NGramTokenizer.cs       |    2 +-
 .../Analysis/NGram/NGramTokenizer.cs               |   34 +-
 .../Pattern/PatternReplaceFilterFactory.cs         |   33 +-
 .../Analysis/Pattern/PatternTokenizer.cs           |    6 +-
 .../Analysis/Pt/RSLPStemmerBase.cs                 |    6 +-
 .../Analysis/Query/QueryAutoStopWordAnalyzer.cs    |    9 +-
 .../Analysis/Ru/RussianAnalyzer.cs                 |   33 +-
 .../Analysis/Ru/RussianLetterTokenizer.cs          |    4 +-
 .../Analysis/Synonym/SynonymFilter.cs              |   34 +-
 .../Analysis/Synonym/SynonymMap.cs                 |    8 +-
 .../Analysis/Th/ThaiTokenizer.cs                   |    2 +-
 .../Analysis/Th/ThaiWordFilter.cs                  |   30 +-
 .../Analysis/Tr/TurkishLowerCaseFilter.cs          |   34 +-
 .../Analysis/Util/AbstractAnalysisFactory.cs       |   14 +-
 .../Analysis/Util/AnalysisSPILoader.cs             |    8 +-
 .../Analysis/Util/CharArrayMap.cs                  |   39 +-
 .../Analysis/Util/CharArraySet.cs                  |   42 +-
 .../Analysis/Util/CharTokenizer.cs                 |    4 +-
 .../Analysis/Util/CharacterUtils.cs                |  112 +-
 .../Analysis/Util/OpenStringBuilder.cs             |   85 +-
 .../Analysis/Util/WordlistLoader.cs                |    3 +-
 .../Wikipedia/WikipediaTokenizerFactory.cs         |   30 +-
 .../Analysis/Icu/ICUNormalizer2CharFilter.cs       |    5 +-
 .../Dict/UserDictionary.cs                         |    9 +-
 .../JapaneseAnalyzer.cs                            |    3 +-
 .../JapanesePartOfSpeechStopFilterFactory.cs       |    3 +-
 .../JapaneseTokenizer.cs                           |    1 +
 .../Tools/ConnectionCostsBuilder.cs                |    2 +-
 .../Tools/TokenInfoDictionaryBuilder.cs            |    3 +-
 .../Tools/UnknownDictionaryBuilder.cs              |    4 +-
 .../Morfologik/MorfologikFilter.cs                 |    1 +
 .../BeiderMorseFilter.cs                           |    8 +-
 .../Language/Bm/Lang.cs                            |   10 +-
 .../Language/Bm/Languages.cs                       |   10 +-
 .../Language/Bm/PhoneticEngine.cs                  |   23 +-
 .../Language/Bm/Rule.cs                            |   19 +-
 .../Language/DaitchMokotoffSoundex.cs              |    1 +
 .../Hhmm/BigramDictionary.cs                       |    1 -
 .../Hhmm/WordDictionary.cs                         |    1 -
 .../Egothor.Stemmer/Row.cs                         |    3 +-
 .../ByTask/Feeds/ContentItemsSource.cs             |    3 +-
 .../ByTask/Feeds/DemoHTMLParser.cs                 |    5 +-
 .../ByTask/Feeds/DirContentSource.cs               |    4 +-
 .../ByTask/Feeds/LineDocSource.cs                  |    3 +-
 .../ByTask/Feeds/TrecContentSource.cs              |    6 +-
 .../ByTask/Feeds/TrecDocParser.cs                  |    8 +-
 .../ByTask/Feeds/TrecFBISParser.cs                 |    6 +-
 .../ByTask/Feeds/TrecFR94Parser.cs                 |    6 +-
 .../ByTask/Feeds/TrecGov2Parser.cs                 |    6 +-
 .../ByTask/Tasks/OpenReaderTask.cs                 |    4 +-
 src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs  |    3 +-
 .../ByTask/Tasks/RepSumByNameRoundTask.cs          |    4 +-
 .../ByTask/Tasks/RepSumByNameTask.cs               |    4 +-
 .../ByTask/Tasks/RepSumByPrefRoundTask.cs          |    4 +-
 .../ByTask/Tasks/RepSumByPrefTask.cs               |    4 +-
 .../ByTask/Tasks/ReportTask.cs                     |    4 +-
 .../ByTask/Tasks/SearchTravRetHighlightTask.cs     |    8 +-
 .../Tasks/SearchTravRetLoadFieldSelectorTask.cs    |    3 +-
 .../Tasks/SearchTravRetVectorHighlightTask.cs      |    8 +-
 .../ByTask/Tasks/SearchWithSortTask.cs             |    4 +-
 .../ByTask/Tasks/WriteLineDocTask.cs               |    7 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs |    4 +-
 src/Lucene.Net.Benchmark/Quality/QualityQuery.cs   |    2 +-
 .../Quality/Trec/QueryDriver.cs                    |    3 +-
 src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs |    4 +-
 .../Quality/Trec/TrecTopicsReader.cs               |    6 +-
 .../BlockTerms/BlockTermsReader.cs                 |    3 +-
 .../BlockTerms/BlockTermsWriter.cs                 |    2 +-
 .../BlockTerms/FixedGapTermsIndexReader.cs         |    2 -
 .../Bloom/BloomFilteringPostingsFormat.cs          |    4 +-
 .../Memory/DirectPostingsFormat.cs                 |    3 +-
 src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs  |    3 +-
 src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs     |    3 +-
 .../Memory/MemoryDocValuesConsumer.cs              |    5 +-
 .../Memory/MemoryPostingsFormat.cs                 |    9 +-
 .../Pulsing/PulsingPostingsReader.cs               |    8 +-
 .../SimpleText/SimpleTextDocValuesReader.cs        |    2 +-
 .../SimpleText/SimpleTextDocValuesWriter.cs        |    3 +-
 .../SimpleText/SimpleTextFieldsReader.cs           |    8 +-
 .../SimpleText/SimpleTextSegmentInfoReader.cs      |    3 +-
 .../SimpleText/SimpleTextTermVectorsReader.cs      |   13 +-
 .../JS/JavascriptCompiler.cs                       |    7 +-
 src/Lucene.Net.Facet/DrillDownQuery.cs             |    3 +-
 src/Lucene.Net.Facet/FacetsConfig.cs               |    5 +-
 .../DefaultSortedSetDocValuesReaderState.cs        |   30 +-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs     |    3 +-
 .../Taxonomy/CachedOrdinalsReader.cs               |    7 +-
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs      |    3 +-
 .../Taxonomy/Directory/DirectoryTaxonomyWriter.cs  |   11 +-
 src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs        |    6 +-
 .../Taxonomy/FloatAssociationFacetField.cs         |   30 +-
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs        |   56 +-
 .../Taxonomy/ParallelTaxonomyArrays.cs             |   30 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs    |    3 +-
 .../Taxonomy/WriterCache/CategoryPathUtils.cs      |    4 +-
 .../Taxonomy/WriterCache/CharBlockArray.cs         |   30 +-
 .../Taxonomy/WriterCache/CompactLabelToOrdinal.cs  |    2 +-
 .../AbstractDistinctValuesCollector.cs             |    3 +-
 .../AbstractFirstPassGroupingCollector.cs          |   38 +-
 .../AbstractGroupFacetCollector.cs                 |   78 +-
 .../AbstractSecondPassGroupingCollector.cs         |   34 +-
 .../Function/FunctionAllGroupsCollector.cs         |   41 +-
 .../Function/FunctionDistinctValuesCollector.cs    |    4 +-
 src/Lucene.Net.Grouping/GroupDocs.cs               |   30 +-
 src/Lucene.Net.Grouping/SearchGroup.cs             |   61 +-
 .../Term/TermAllGroupHeadsCollector.cs             |    4 +-
 .../Term/TermDistinctValuesCollector.cs            |   30 +-
 src/Lucene.Net.Grouping/TopGroups.cs               |   37 +-
 .../Highlight/QueryScorer.cs                       |   40 +-
 .../Highlight/QueryTermExtractor.cs                |   45 +-
 .../Highlight/QueryTermScorer.cs                   |   49 +-
 .../Highlight/WeightedSpanTermExtractor.cs         |   55 +-
 .../PostingsHighlight/MultiTermHighlighting.cs     |   32 +-
 .../PostingsHighlight/PostingsHighlighter.cs       |   55 +-
 .../VectorHighlight/BaseFragmentsBuilder.cs        |   30 +-
 .../VectorHighlight/FieldFragList.cs               |   30 +-
 .../VectorHighlight/FieldPhraseList.cs             |   30 +-
 .../VectorHighlight/FieldQuery.cs                  |   56 +-
 .../VectorHighlight/SimpleBoundaryScanner.cs       |    8 +-
 .../VectorHighlight/WeightedFieldFragList.cs       |   39 +-
 src/Lucene.Net.Join/TermsWithScoreCollector.cs     |   30 +-
 src/Lucene.Net.Join/ToChildBlockJoinQuery.cs       |   32 +-
 src/Lucene.Net.Join/ToParentBlockJoinCollector.cs  |   30 +-
 src/Lucene.Net.Join/ToParentBlockJoinQuery.cs      |   32 +-
 src/Lucene.Net.Misc/Document/LazyDocument.cs       |   33 +-
 .../Index/Sorter/SortingAtomicReader.cs            |   30 +-
 src/Lucene.Net.Misc/Misc/HighFreqTerms.cs          |    4 +-
 src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs      |   27 +-
 src/Lucene.Net.Queries/BooleanFilter.cs            |    4 +-
 src/Lucene.Net.Queries/CommonTermsQuery.cs         |   20 +-
 src/Lucene.Net.Queries/CustomScoreQuery.cs         |    2 +-
 src/Lucene.Net.Queries/Function/BoostedQuery.cs    |    2 +-
 src/Lucene.Net.Queries/Function/ValueSource.cs     |   13 +-
 .../Function/ValueSources/EnumFieldSource.cs       |   15 +-
 .../Function/ValueSources/MultiBoolFunction.cs     |   12 +-
 .../Function/ValueSources/MultiFunction.cs         |   12 +-
 .../Function/ValueSources/VectorValueSource.cs     |   12 +-
 src/Lucene.Net.Queries/Mlt/MoreLikeThisQuery.cs    |    9 +-
 src/Lucene.Net.Queries/TermsFilter.cs              |    6 +-
 .../Classic/QueryParserBase.cs                     |    4 +-
 .../Flexible/Core/Nodes/FieldQueryNode.cs          |   10 +-
 .../Flexible/Core/Nodes/FuzzyQueryNode.cs          |    8 +-
 .../Flexible/Core/Nodes/PathQueryNode.cs           |    6 +-
 .../Flexible/Core/Nodes/QuotedFieldQueryNode.cs    |    8 +-
 .../Flexible/Core/Nodes/TextableQueryNode.cs       |    2 +-
 .../Flexible/Core/Parser/EscapeQuerySyntax.cs      |    2 +-
 .../Flexible/Core/Util/UnescapedCharSequence.cs    |   24 +-
 .../Standard/Builders/FuzzyQueryNodeBuilder.cs     |    4 +-
 .../Builders/MultiPhraseQueryNodeBuilder.cs        |    3 +-
 .../Builders/PrefixWildcardQueryNodeBuilder.cs     |    2 +-
 .../Flexible/Standard/Config/NumberDateFormat.cs   |    1 -
 .../Standard/Config/StandardQueryConfigHandler.cs  |    6 +-
 .../Standard/Nodes/PrefixWildcardQueryNode.cs      |    8 +-
 .../Flexible/Standard/Nodes/RegexpQueryNode.cs     |   12 +-
 .../Flexible/Standard/Nodes/WildcardQueryNode.cs   |    8 +-
 .../Standard/Parser/EscapeQuerySyntaxImpl.cs       |   16 +-
 .../Processors/AnalyzerQueryNodeProcessor.cs       |    6 +-
 .../LowercaseExpandedTermsQueryNodeProcessor.cs    |    4 +-
 .../Processors/OpenRangeQueryNodeProcessor.cs      |    8 +-
 .../Processors/TermRangeQueryNodeProcessor.cs      |    8 +-
 .../Processors/WildcardQueryNodeProcessor.cs       |   10 +-
 .../Simple/SimpleQueryParser.cs                    |    4 +-
 .../Surround/Parser/QueryParserTokenManager.cs     |    1 -
 .../Surround/Query/SimpleTerm.cs                   |    4 +-
 .../Surround/Query/SpanNearClauseFactory.cs        |    4 +-
 .../Xml/Builders/LikeThisQueryBuilder.cs           |    9 +-
 .../Http/ReplicationService.cs                     |    2 +-
 .../IndexAndTaxonomyRevision.cs                    |    4 +-
 .../IndexReplicationHandler.cs                     |    3 +-
 src/Lucene.Net.Replicator/ReplicationClient.cs     |    3 +-
 .../Queries/FuzzyLikeThisQuery.cs                  |    6 +-
 src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs   |    3 +-
 .../Prefix/IntersectsPrefixTreeFilter.cs           |    1 -
 src/Lucene.Net.Spatial/Properties/AssemblyInfo.cs  |    1 -
 .../Util/CachingDoubleValueSource.cs               |    4 +-
 src/Lucene.Net.Suggest/Spell/CombineSuggestion.cs  |   30 +-
 src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs |  113 +-
 .../Spell/LuceneLevenshteinDistance.cs             |    3 +-
 .../Spell/SuggestWordFrequencyComparator.cs        |    2 +-
 .../Spell/SuggestWordScoreComparator.cs            |    2 +-
 .../Spell/WordBreakSpellChecker.cs                 |   24 +-
 .../Suggest/Analyzing/AnalyzingInfixSuggester.cs   |    9 +-
 .../Suggest/Analyzing/AnalyzingSuggester.cs        |   37 +-
 .../Suggest/Analyzing/BlendedInfixSuggester.cs     |    5 +-
 .../Suggest/Analyzing/FreeTextSuggester.cs         |    6 +-
 .../Suggest/Analyzing/FuzzySuggester.cs            |   10 +-
 .../Suggest/DocumentDictionary.cs                  |   26 +-
 .../Suggest/Fst/FSTCompletion.cs                   |    3 +-
 .../Suggest/Jaspell/JaspellTernarySearchTrie.cs    |    2 +-
 src/Lucene.Net.Suggest/Suggest/Lookup.cs           |    2 +-
 .../Suggest/SortedInputIterator.cs                 |    6 +-
 .../Analysis/MockCharFilter.cs                     |   13 +-
 .../Analysis/MockTokenFilter.cs                    |   20 +-
 .../Analysis/MockTokenizer.cs                      |    2 +-
 .../Analysis/VocabularyAssert.cs                   |    2 +-
 .../Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs  |    2 +-
 .../Codecs/Lucene3x/TermInfosWriter.cs             |    2 +-
 .../Codecs/Lucene40/Lucene40DocValuesWriter.cs     |    7 +-
 .../Codecs/Lucene42/Lucene42DocValuesConsumer.cs   |    5 +-
 .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs        |    7 +-
 .../Index/BaseDocValuesFormatTestCase.cs           |   12 +-
 .../Index/BaseIndexFileFormatTestCase.cs           |    4 +-
 .../Index/BasePostingsFormatTestCase.cs            |   26 +-
 .../Index/BaseStoredFieldsFormatTestCase.cs        |   20 +-
 .../Index/BaseTermVectorsFormatTestCase.cs         |   30 +-
 src/Lucene.Net.TestFramework/Index/DocHelper.cs    |    2 +-
 .../Index/MockRandomMergePolicy.cs                 |    5 +-
 src/Lucene.Net.TestFramework/Index/RandomCodec.cs  |   12 +-
 .../Index/ThreadedIndexingAndSearchingTestCase.cs  |   27 +-
 .../Search/AssertingScorer.cs                      |   26 +-
 src/Lucene.Net.TestFramework/Search/CheckHits.cs   |    7 +-
 .../Search/RandomSimilarityProvider.cs             |    3 +-
 .../Search/ShardSearchingTestBase.cs               |    5 +-
 .../Store/BaseDirectoryTestCase.cs                 |   26 +-
 .../Store/MockDirectoryWrapper.cs                  |   34 +-
 .../Support/ApiScanTestBase.cs                     |    2 +-
 .../Support/JavaCompatibility/LuceneTestCase.cs    |    7 +-
 .../Util/Automaton/AutomatonTestUtil.cs            |   18 +-
 src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs |    9 +-
 .../Util/LuceneTestCase.cs                         |   42 +-
 .../Util/TestRuleSetupAndRestoreClassEnv.cs        |    5 +-
 src/Lucene.Net.TestFramework/Util/TestUtil.cs      |   28 +-
 .../CharFilters/HTMLStripCharFilterTest.cs         |   17 +-
 .../Analysis/CharFilters/TestMappingCharFilter.cs  |    3 +-
 .../Analysis/Commongrams/CommonGramsFilterTest.cs  |    3 +-
 .../Compound/TestCompoundWordTokenFilter.cs        |   34 +-
 .../Analysis/Core/TestAllAnalyzersHaveFactories.cs |   43 +-
 .../Analysis/Core/TestBugInSomething.cs            |   37 +-
 .../Analysis/Core/TestClassicAnalyzer.cs           |   30 +-
 .../Analysis/Core/TestDuelingAnalyzers.cs          |   34 +-
 .../Analysis/Core/TestRandomChains.cs              |   62 +-
 .../Analysis/Core/TestStandardAnalyzer.cs          |   30 +-
 .../Analysis/Core/TestStopAnalyzer.cs              |    3 +-
 .../Analysis/Core/TestTypeTokenFilter.cs           |   35 +-
 .../Analysis/Core/TestUAX29URLEmailAnalyzer.cs     |   30 +-
 .../Analysis/Core/TestUAX29URLEmailTokenizer.cs    |   30 +-
 .../Analysis/Hunspell/StemmerTestBase.cs           |   32 +-
 .../Analysis/Hunspell/TestDictionary.cs            |   40 +-
 .../Analysis/Hunspell/TestHunspellStemFilter.cs    |    4 +-
 .../Analysis/Miscellaneous/PatternAnalyzerTest.cs  |   30 +-
 .../Miscellaneous/TestCapitalizationFilter.cs      |    3 +-
 .../Miscellaneous/TestCodepointCountFilter.cs      |   34 +-
 .../Analysis/Miscellaneous/TestKeepWordFilter.cs   |    5 +-
 .../Miscellaneous/TestPerFieldAnalyzerWrapper.cs   |    3 +-
 .../TestRemoveDuplicatesTokenFilter.cs             |   18 +-
 .../TestRemoveDuplicatesTokenFilterFactory.cs      |    4 +-
 .../Miscellaneous/TestStemmerOverrideFilter.cs     |   35 +-
 .../Analysis/NGram/EdgeNGramTokenFilterTest.cs     |    4 +-
 .../Analysis/NGram/EdgeNGramTokenizerTest.cs       |   30 +-
 .../Analysis/NGram/NGramTokenFilterTest.cs         |    4 +-
 .../Analysis/NGram/NGramTokenizerTest.cs           |   33 +-
 .../Query/QueryAutoStopWordAnalyzerTest.cs         |   41 +-
 .../Snowball/TestSnowballPorterFilterFactory.cs    |    4 +-
 .../Analysis/Synonym/TestSlowSynonymFilter.cs      |   41 +-
 .../Analysis/Synonym/TestSynonymMapFilter.cs       |    6 +-
 .../Analysis/Util/TestCharArrayMap.cs              |    3 +-
 .../Analysis/Util/TestCharArraySet.cs              |   51 +-
 .../Analysis/Util/TestCharTokenizers.cs            |    5 +-
 .../Analysis/Util/TestCharacterUtils.cs            |   18 +-
 .../Analysis/Util/TestFilesystemResourceLoader.cs  |   30 +-
 .../Analysis/Wikipedia/WikipediaTokenizerTest.cs   |    5 +-
 .../Collation/TestICUCollationKeyFilterFactory.cs  |    1 +
 .../TestJapaneseTokenizer.cs                       |    4 +-
 .../TestSearchMode.cs                              |    2 +-
 .../Morfologik/TestMorfologikAnalyzer.cs           |    6 +-
 .../Morfologik/TestMorfologikFilterFactory.cs      |    7 +-
 .../Language/Bm/BeiderMorseEncoderTest.cs          |    3 +-
 .../Language/Bm/CacheSubSequencePerformanceTest.cs |   17 +-
 .../Language/Bm/LanguageGuessingTest.cs            |   11 +-
 .../Language/Bm/PhoneticEngineRegressionTest.cs    |   41 +-
 .../Language/Bm/PhoneticEngineTest.cs              |    2 +-
 .../TestBeiderMorseFilter.cs                       |    3 +-
 .../ByTask/Feeds/DocMakerTest.cs                   |    1 -
 .../ByTask/Feeds/TrecContentSourceTest.cs          |    3 +-
 .../ByTask/Tasks/WriteEnwikiLineDocTaskTest.cs     |    4 +-
 .../ByTask/Tasks/WriteLineDocTaskTest.cs           |    5 +-
 .../Pulsing/TestPulsingReuse.cs                    |   39 +-
 .../JS/TestCustomFunctions.cs                      |  440 +-
 .../TestExpressionSorts.cs                         |    6 +-
 src/Lucene.Net.Tests.Facet/FacetTestCase.cs        |   15 +-
 .../Taxonomy/Directory/TestAddTaxonomy.cs          |    4 +-
 .../Directory/TestDirectoryTaxonomyReader.cs       |   14 +-
 .../Taxonomy/TestSearcherTaxonomyManager.cs        |    3 +-
 .../Taxonomy/TestTaxonomyCombined.cs               |    2 -
 .../Taxonomy/TestTaxonomyFacetCounts.cs            |    7 +-
 .../Taxonomy/TestTaxonomyFacetCounts2.cs           |   33 +-
 .../WriterCache/TestCompactLabelToOrdinal.cs       |   49 +-
 src/Lucene.Net.Tests.Facet/TestDrillSideways.cs    |    9 +-
 .../AllGroupHeadsCollectorTest.cs                  |    4 +-
 .../DistinctValuesCollectorTest.cs                 |   11 +-
 .../GroupFacetCollectorTest.cs                     |   66 +-
 src/Lucene.Net.Tests.Grouping/TestGrouping.cs      |   11 +-
 .../Highlight/HighlighterTest.cs                   |   36 +-
 .../PostingsHighlight/TestPostingsHighlighter.cs   |   30 +-
 .../TestPostingsHighlighterRanking.cs              |   35 +-
 .../PostingsHighlight/TestWholeBreakIterator.cs    |   30 +-
 .../VectorHighlight/AbstractTestCase.cs            |    2 +-
 .../BreakIteratorBoundaryScannerTest.cs            |   30 +-
 .../VectorHighlight/FastVectorHighlighterTest.cs   |   35 +-
 .../VectorHighlight/FieldQueryTest.cs              |   45 +-
 .../VectorHighlight/SimpleFragmentsBuilderTest.cs  |   35 +-
 src/Lucene.Net.Tests.Join/TestBlockJoin.cs         |   23 +-
 src/Lucene.Net.Tests.Join/TestJoinUtil.cs          |    7 +-
 .../Index/Memory/MemoryIndexTest.cs                |    5 +-
 .../Document/TestLazyDocument.cs                   |    5 +-
 .../Index/Sorter/SorterTestBase.cs                 |    5 +-
 .../Index/Sorter/TestEarlyTermination.cs           |    3 +-
 .../Index/Sorter/TestSortingMergePolicy.cs         |    3 +-
 src/Lucene.Net.Tests.Misc/Util/Fst/TestFSTsMisc.cs |   25 +-
 .../CommonTermsQueryTest.cs                        |    9 +-
 .../Function/TestFieldScoreQuery.cs                |    2 +-
 .../Function/TestOrdValues.cs                      |    2 +-
 src/Lucene.Net.Tests.Queries/TermFilterTest.cs     |    3 +-
 src/Lucene.Net.Tests.Queries/TermsFilterTest.cs    |   13 +-
 .../ComplexPhrase/TestComplexPhraseQuery.cs        |    7 +-
 .../Flexible/Core/Nodes/TestQueryNode.cs           |    6 +-
 .../Precedence/TestPrecedenceQueryParser.cs        |    3 +-
 .../Flexible/Standard/TestNumericQueryParser.cs    |   14 +-
 .../Simple/TestSimpleQueryParser.cs                |   10 +-
 .../Queries/DuplicateFilterTest.cs                 |    7 +-
 .../Queries/FuzzyLikeThisQueryTest.cs              |    9 +-
 .../Queries/TestSlowFuzzyQuery.cs                  |    5 +-
 .../Queries/TestSlowFuzzyQuery2.cs                 |    2 +-
 src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs    |    3 +-
 .../Prefix/SpatialOpRecursivePrefixTreeTest.cs     |   27 +-
 .../Prefix/TestRecursivePrefixTreeStrategy.cs      |    3 +-
 .../Prefix/TestTermQueryPrefixGridStrategy.cs      |    3 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs    |    2 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestData.cs    |    2 +-
 src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs   |    6 +-
 .../Spell/TestWordBreakSpellChecker.cs             |    4 +-
 .../Analyzing/AnalyzingInfixSuggesterTest.cs       |   12 +-
 .../Suggest/Analyzing/AnalyzingSuggesterTest.cs    |   14 +-
 .../Suggest/Analyzing/FuzzySuggesterTest.cs        |   36 +-
 .../Suggest/Analyzing/TestFreeTextSuggester.cs     |   10 +-
 .../Suggest/DocumentDictionaryTest.cs              |    5 +-
 .../Suggest/DocumentValueSourceDictionaryTest.cs   |    5 +-
 .../Suggest/Fst/FSTCompletionTest.cs               |    5 +-
 .../Suggest/Fst/WFSTCompletionTest.cs              |    5 +-
 .../Suggest/InputArrayIterator.cs                  |   35 +-
 .../Suggest/LookupBenchmarkTest.cs                 |   11 +-
 .../Suggest/TestInputIterator.cs                   |   15 +-
 .../Analysis/TestMockAnalyzer.cs                   |    2 +-
 .../Analysis/TestGraphTokenizers.cs                |    4 +-
 src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs  |    2 +-
 .../TokenAttributes/TestCharTermAttributeImpl.cs   |  115 +-
 .../Compressing/AbstractTestLZ4CompressionMode.cs  |    2 +-
 .../Codecs/Lucene3x/TestSurrogates.cs              |    3 +-
 .../Codecs/Lucene3x/TestTermInfosReaderIndex.cs    |    3 +-
 .../Codecs/Lucene40/TestLucene40PostingsReader.cs  |    3 +-
 .../Codecs/Lucene40/TestReuseDocsEnum.cs           |    8 +-
 .../Codecs/Lucene41/TestBlockPostingsFormat3.cs    |   12 +-
 .../Codecs/PerField/TestPerFieldDocValuesFormat.cs |    8 +-
 .../Codecs/PerField/TestPerFieldPostingsFormat.cs  |    4 +-
 .../Document/TestBinaryDocument.cs                 |    2 +-
 src/Lucene.Net.Tests/Document/TestDocument.cs      |    2 +-
 src/Lucene.Net.Tests/Document/TestField.cs         |    1 +
 src/Lucene.Net.Tests/Index/Test2BTerms.cs          |    3 +-
 .../Index/TestBackwardsCompatibility.cs            |    8 +-
 .../Index/TestBackwardsCompatibility3x.cs          |    8 +-
 src/Lucene.Net.Tests/Index/TestBagOfPositions.cs   |    3 +-
 src/Lucene.Net.Tests/Index/TestBagOfPostings.cs    |    6 +-
 .../Index/TestBinaryDocValuesUpdates.cs            |    5 +-
 src/Lucene.Net.Tests/Index/TestCodecs.cs           |    5 +-
 src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs   |    3 +-
 src/Lucene.Net.Tests/Index/TestDirectoryReader.cs  |   14 +-
 .../Index/TestDirectoryReaderReopen.cs             |    3 +-
 src/Lucene.Net.Tests/Index/TestDoc.cs              |    5 +-
 src/Lucene.Net.Tests/Index/TestDocTermOrds.cs      |   12 +-
 .../Index/TestDocValuesWithThreads.cs              |    3 +-
 .../Index/TestDocumentsWriterDeleteQueue.cs        |   15 +-
 src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs    |    2 +-
 src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs |   11 +-
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs      |   18 +-
 .../Index/TestIndexWriterConfig.cs                 |   12 +-
 .../Index/TestIndexWriterDelete.cs                 |    5 +-
 .../Index/TestIndexWriterOutOfFileDescriptors.cs   |    5 +-
 .../Index/TestIndexWriterReader.cs                 |    9 +-
 .../Index/TestIndexWriterUnicode.cs                |   12 +-
 src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs |    3 +-
 src/Lucene.Net.Tests/Index/TestMixedCodecs.cs      |    3 +-
 .../Index/TestMixedDocValuesUpdates.cs             |    3 +-
 src/Lucene.Net.Tests/Index/TestMultiFields.cs      |    5 +-
 src/Lucene.Net.Tests/Index/TestNeverDelete.cs      |    3 +-
 .../Index/TestNumericDocValuesUpdates.cs           |    5 +-
 src/Lucene.Net.Tests/Index/TestPayloads.cs         |    2 +-
 src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs |   13 +-
 src/Lucene.Net.Tests/Index/TestSegmentMerger.cs    |    8 +-
 src/Lucene.Net.Tests/Index/TestSegmentReader.cs    |   13 +-
 src/Lucene.Net.Tests/Index/TestStressAdvance.cs    |    3 +-
 src/Lucene.Net.Tests/Index/TestStressIndexing2.cs  |    4 +-
 src/Lucene.Net.Tests/Index/TestTermsEnum.cs        |   13 +-
 src/Lucene.Net.Tests/Index/TestTermsEnum2.cs       |   10 +-
 src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs  |    3 +-
 .../Search/Payloads/TestPayloadNearQuery.cs        |    2 +-
 .../Search/Spans/MultiSpansWrapper.cs              |    6 +-
 src/Lucene.Net.Tests/Search/Spans/TestBasics.cs    |    2 +-
 .../Search/Spans/TestFieldMaskingSpanQuery.cs      |    7 +-
 .../Search/Spans/TestPayloadSpans.cs               |   11 +-
 src/Lucene.Net.Tests/Search/TestBooleanQuery.cs    |    2 +-
 .../Search/TestBooleanQueryVisitSubscorers.cs      |    3 +-
 src/Lucene.Net.Tests/Search/TestBooleanScorer.cs   |    2 +-
 .../Search/TestCustomSearcherSort.cs               |    6 +-
 src/Lucene.Net.Tests/Search/TestDateSort.cs        |    2 +-
 src/Lucene.Net.Tests/Search/TestFieldCache.cs      |    4 +-
 src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs      |    4 +-
 src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs |   48 +-
 .../Search/TestQueryWrapperFilter.cs               |    3 +-
 src/Lucene.Net.Tests/Search/TestRegexpQuery.cs     |    2 +-
 .../Search/TestSameScoresWithThreads.cs            |    3 +-
 src/Lucene.Net.Tests/Search/TestSearchAfter.cs     |    8 +-
 src/Lucene.Net.Tests/Search/TestShardSearching.cs  |    3 +-
 .../Search/TestSloppyPhraseQuery.cs                |    2 +-
 src/Lucene.Net.Tests/Search/TestSort.cs            |   25 +-
 src/Lucene.Net.Tests/Search/TestSortRandom.cs      |    9 +-
 src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs  |    5 +-
 .../Search/TestTimeLimitingCollector.cs            |    4 +-
 src/Lucene.Net.Tests/Store/TestDirectory.cs        |    7 +-
 .../Store/TestFileSwitchDirectory.cs               |   17 +-
 src/Lucene.Net.Tests/Store/TestFilterDirectory.cs  |    3 +-
 .../Store/TestNRTCachingDirectory.cs               |    7 +-
 src/Lucene.Net.Tests/Support/C5/ArrayBase.cs       |  485 --
 src/Lucene.Net.Tests/Support/C5/ArrayList.cs       | 2252 ------
 .../Support/C5/DropMultiplicity.cs                 |   38 -
 src/Lucene.Net.Tests/Support/C5/Events.cs          |  893 ---
 .../Support/C5/GenericCollectionTester.cs          |   88 -
 src/Lucene.Net.Tests/Support/C5/HashBag.cs         |  660 --
 src/Lucene.Net.Tests/Support/C5/Sorting.cs         |  239 -
 src/Lucene.Net.Tests/Support/C5/SupportClasses.cs  |  506 --
 .../Support/C5/TestTreeDictionary.cs               |  522 --
 src/Lucene.Net.Tests/Support/C5/WeakViewList.cs    |  105 -
 src/Lucene.Net.Tests/Support/C5/Wrappers.cs        | 2364 ------
 src/Lucene.Net.Tests/Support/TestApiConsistency.cs |    2 +-
 src/Lucene.Net.Tests/Support/TestCollections.cs    |  564 --
 src/Lucene.Net.Tests/Support/TestEquatableList.cs  |  164 -
 src/Lucene.Net.Tests/Support/TestEquatableSet.cs   |  164 -
 src/Lucene.Net.Tests/Support/TestHashMap.cs        |  220 -
 src/Lucene.Net.Tests/Support/TestLinkedHashMap.cs  |  359 -
 src/Lucene.Net.Tests/Support/TestPriorityQueue.cs  |  912 ---
 .../Support/TestStringBuilderExtensions.cs         |  383 -
 .../Support/TestStringExtensions.cs                |   59 -
 src/Lucene.Net.Tests/Support/TestTreeDictionary.cs |  819 ---
 src/Lucene.Net.Tests/Support/TestTreeSet.cs        | 3178 --------
 .../Util/Automaton/TestCompiledAutomaton.cs        |    3 +-
 .../Util/Automaton/TestDeterminizeLexicon.cs       |    6 +-
 .../Util/Automaton/TestUTF32ToUTF8.cs              |    5 +-
 src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs          |   20 +-
 src/Lucene.Net.Tests/Util/TestBytesRef.cs          |    4 +-
 src/Lucene.Net.Tests/Util/TestBytesRefHash.cs      |   10 +-
 src/Lucene.Net.Tests/Util/TestCharsRef.cs          |   38 +-
 src/Lucene.Net.Tests/Util/TestCollectionUtil.cs    |   10 +-
 src/Lucene.Net.Tests/Util/TestFilterIterator.cs    |   49 +-
 src/Lucene.Net.Tests/Util/TestIdentityHashSet.cs   |   10 +-
 src/Lucene.Net.Tests/Util/TestMathUtil.cs          |    2 +-
 src/Lucene.Net.Tests/Util/TestNumericUtils.cs      |   53 +-
 src/Lucene.Net.Tests/Util/TestOfflineSorter.cs     |    2 +-
 .../Util/TestRecyclingByteBlockAllocator.cs        |    7 +-
 .../Util/TestRecyclingIntBlockAllocator.cs         |    7 +-
 src/Lucene.Net.Tests/Util/TestSentinelIntSet.cs    |    4 +-
 src/Lucene.Net.Tests/Util/TestUnicodeUtil.cs       |    7 +-
 src/Lucene.Net/Analysis/Analyzer.cs                |    4 +-
 src/Lucene.Net/Analysis/Token.cs                   |    3 +-
 .../Analysis/TokenAttributes/CharTermAttribute.cs  |  302 +-
 .../Analysis/TokenAttributes/ICharTermAttribute.cs |  176 +-
 src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs  |   22 +-
 src/Lucene.Net/Codecs/BlockTreeTermsReader.cs      |    3 +-
 src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs      |    1 +
 .../Compressing/CompressingTermVectorsReader.cs    |    1 -
 .../Compressing/CompressingTermVectorsWriter.cs    |    3 +-
 src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs    |   43 +-
 src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs   |    5 +-
 .../Codecs/Lucene3x/Lucene3xNormsProducer.cs       |    5 +-
 .../Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs   |    9 +-
 src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs       |    2 +-
 src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs  |    2 +-
 .../Codecs/Lucene3x/TermInfosReaderIndex.cs        |    2 +-
 .../Codecs/Lucene40/Lucene40StoredFieldsReader.cs  |    1 -
 .../Codecs/Lucene40/Lucene40TermVectorsWriter.cs   |    2 +-
 .../Codecs/Lucene42/Lucene42FieldInfosReader.cs    |    4 +-
 .../Codecs/Lucene42/Lucene42NormsConsumer.cs       |    5 +-
 .../Codecs/Lucene45/Lucene45DocValuesConsumer.cs   |    5 +-
 .../Codecs/Lucene46/Lucene46FieldInfosReader.cs    |    4 +-
 .../Codecs/PerField/PerFieldDocValuesFormat.cs     |    9 +-
 .../Codecs/PerField/PerFieldPostingsFormat.cs      |    3 +-
 src/Lucene.Net/Codecs/TermVectorsWriter.cs         |    2 +-
 .../Document/DocumentStoredFieldVisitor.cs         |   13 +-
 src/Lucene.Net/Index/BaseCompositeReader.cs        |    7 +-
 src/Lucene.Net/Index/BufferedUpdates.cs            |   30 +-
 src/Lucene.Net/Index/BufferedUpdatesStream.cs      |    5 +-
 src/Lucene.Net/Index/CheckIndex.cs                 |    1 +
 src/Lucene.Net/Index/CompositeReaderContext.cs     |   17 +-
 src/Lucene.Net/Index/DocFieldProcessor.cs          |    5 +-
 src/Lucene.Net/Index/DocumentsWriter.cs            |    8 +-
 .../Index/DocumentsWriterFlushControl.cs           |    5 +-
 src/Lucene.Net/Index/DocumentsWriterPerThread.cs   |   37 +-
 .../Index/DocumentsWriterStallControl.cs           |    6 +-
 src/Lucene.Net/Index/FieldInfos.cs                 |    5 +-
 .../Index/FreqProxTermsWriterPerField.cs           |    1 +
 src/Lucene.Net/Index/IndexReader.cs                |    8 +-
 src/Lucene.Net/Index/IndexWriter.cs                |   21 +-
 src/Lucene.Net/Index/MergePolicy.cs                |    4 +-
 src/Lucene.Net/Index/MultiFields.cs                |    3 +-
 src/Lucene.Net/Index/NormsConsumerPerField.cs      |    2 +-
 src/Lucene.Net/Index/ParallelAtomicReader.cs       |   10 +-
 src/Lucene.Net/Index/ParallelCompositeReader.cs    |    5 +-
 src/Lucene.Net/Index/SegmentCommitInfo.cs          |    8 +-
 src/Lucene.Net/Index/SegmentCoreReaders.cs         |    3 +-
 src/Lucene.Net/Index/SegmentInfo.cs                |    4 +-
 src/Lucene.Net/Index/SegmentInfos.cs               |   19 +-
 src/Lucene.Net/Index/SegmentReader.cs              |    4 +-
 src/Lucene.Net/Index/Term.cs                       |    2 +-
 src/Lucene.Net/Index/TermVectorsConsumer.cs        |    1 +
 src/Lucene.Net/Index/TieredMergePolicy.cs          |    4 +-
 src/Lucene.Net/Search/BooleanQuery.cs              |    5 +-
 src/Lucene.Net/Search/DisjunctionMaxQuery.cs       |   29 +-
 src/Lucene.Net/Search/FieldCacheImpl.cs            |    2 +-
 src/Lucene.Net/Search/FieldComparator.cs           |  116 +-
 src/Lucene.Net/Search/FuzzyTermsEnum.cs            |   46 +-
 src/Lucene.Net/Search/IndexSearcher.cs             |    4 +-
 src/Lucene.Net/Search/MultiPhraseQuery.cs          |   11 +-
 src/Lucene.Net/Search/NGramPhraseQuery.cs          |    6 +-
 src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs  |    4 +-
 src/Lucene.Net/Search/PhraseQuery.cs               |    5 +-
 src/Lucene.Net/Search/ReferenceManager.cs          |    5 +-
 src/Lucene.Net/Search/SloppyPhraseScorer.cs        |   22 +-
 src/Lucene.Net/Search/Spans/NearSpansOrdered.cs    |    3 +-
 src/Lucene.Net/Search/Spans/NearSpansUnordered.cs  |    3 +-
 src/Lucene.Net/Search/Spans/SpanFirstQuery.cs      |    1 -
 .../Search/Spans/SpanNearPayloadCheckQuery.cs      |   30 +-
 src/Lucene.Net/Search/Spans/SpanNearQuery.cs       |   36 +-
 src/Lucene.Net/Search/Spans/SpanNotQuery.cs        |   10 +-
 src/Lucene.Net/Search/Spans/SpanOrQuery.cs         |   16 +-
 .../Search/Spans/SpanPayloadCheckQuery.cs          |   28 +-
 .../Search/Spans/SpanPositionRangeQuery.cs         |    1 -
 src/Lucene.Net/Search/Spans/SpanWeight.cs          |    3 +-
 src/Lucene.Net/Search/TopTermsRewrite.cs           |   32 +-
 src/Lucene.Net/Search/WildcardQuery.cs             |    2 +-
 src/Lucene.Net/Store/CompoundFileWriter.cs         |    3 +-
 src/Lucene.Net/Store/DataInput.cs                  |    3 +-
 src/Lucene.Net/Store/FileSwitchDirectory.cs        |    3 +-
 src/Lucene.Net/Store/NRTCachingDirectory.cs        |    3 +-
 src/Lucene.Net/Store/SingleInstanceLockFactory.cs  |    7 +-
 src/Lucene.Net/Store/TrackingDirectoryWrapper.cs   |   11 +-
 src/Lucene.Net/Support/Arrays.cs                   |    7 -
 src/Lucene.Net/Support/AssemblyUtils.cs            |    3 +-
 src/Lucene.Net/Support/AttributeItem.cs            |   42 -
 src/Lucene.Net/Support/C5.Support.cs               | 7637 --------------------
 src/Lucene.Net/Support/Character.cs                |  389 -
 src/Lucene.Net/Support/Collections.cs              |  970 +--
 src/Lucene.Net/Support/ConcurrentHashSet.cs        |  897 ++-
 src/Lucene.Net/Support/ConcurrentSet.cs            |  351 +
 src/Lucene.Net/Support/Equatable.cs                |   80 -
 src/Lucene.Net/Support/EquatableList.cs            |  385 -
 src/Lucene.Net/Support/EquatableSet.cs             |  463 --
 ... ExceptionToNullableEnumConventionAttribute.cs} |    2 +-
 src/Lucene.Net/Support/GeneralKeyedCollection.cs   |  114 -
 src/Lucene.Net/Support/HashMap.cs                  |  571 --
 src/Lucene.Net/Support/ICharSequence.cs            |   78 -
 src/Lucene.Net/Support/IdentityComparer.cs         |   87 -
 src/Lucene.Net/Support/IdentityHashMap.cs          |   39 -
 src/Lucene.Net/Support/IdentityHashSet.cs          |   34 -
 src/Lucene.Net/Support/LinkedHashMap.cs            |  534 --
 src/Lucene.Net/Support/ListExtensions.cs           |   27 -
 src/Lucene.Net/Support/PriorityQueue.cs            |  683 --
 src/Lucene.Net/Support/SetExtensions.cs            |   55 +-
 src/Lucene.Net/Support/SignedZeroComparer.cs       |   48 -
 .../Support/StringBuilderCharSequenceWrapper.cs    |   74 -
 src/Lucene.Net/Support/StringBuilderExtensions.cs  |  229 -
 .../Support/StringCharSequenceWrapper.cs           |   78 -
 src/Lucene.Net/Support/StringExtensions.cs         |  185 +-
 src/Lucene.Net/Support/TreeDictionary.cs           | 1299 ----
 src/Lucene.Net/Support/TreeSet.cs                  | 4204 -----------
 src/Lucene.Net/Support/WeakDictionary.cs           |   11 +-
 src/Lucene.Net/Util/AttributeSource.cs             |   50 +-
 src/Lucene.Net/Util/Automaton/Automaton.cs         |   67 +-
 src/Lucene.Net/Util/Automaton/BasicAutomata.cs     |    6 +-
 src/Lucene.Net/Util/Automaton/BasicOperations.cs   |   36 +-
 .../Util/Automaton/CharacterRunAutomaton.cs        |    4 +-
 .../Util/Automaton/DaciukMihovAutomatonBuilder.cs  |   24 +-
 .../Util/Automaton/LevenshteinAutomata.cs          |   17 +-
 .../Util/Automaton/MinimizationOperations.cs       |    9 +-
 src/Lucene.Net/Util/Automaton/RegExp.cs            |   34 +-
 src/Lucene.Net/Util/Automaton/SortedIntSet.cs      |    3 +-
 src/Lucene.Net/Util/Automaton/SpecialOperations.cs |   17 +-
 src/Lucene.Net/Util/Automaton/State.cs             |   59 +-
 src/Lucene.Net/Util/Automaton/Transition.cs        |   26 +-
 src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs       |    1 -
 src/Lucene.Net/Util/BroadWord.cs                   |    4 +-
 src/Lucene.Net/Util/BytesRef.cs                    |    3 +-
 src/Lucene.Net/Util/CharsRef.cs                    |   29 +-
 src/Lucene.Net/Util/CloseableThreadLocal.cs        |    4 +-
 src/Lucene.Net/Util/CollectionUtil.cs              |    2 +-
 src/Lucene.Net/Util/FieldCacheSanityChecker.cs     |    5 +-
 src/Lucene.Net/Util/Fst/BytesStore.cs              |    3 +-
 src/Lucene.Net/Util/Fst/FST.cs                     |   13 +-
 src/Lucene.Net/Util/Fst/NodeHash.cs                |   24 +-
 src/Lucene.Net/Util/Fst/Outputs.cs                 |    4 +
 src/Lucene.Net/Util/Fst/Util.cs                    |   15 +-
 src/Lucene.Net/Util/IntroSorter.cs                 |    1 -
 src/Lucene.Net/Util/MapOfSets.cs                   |    5 +-
 src/Lucene.Net/Util/MergedIterator.cs              |    2 +-
 src/Lucene.Net/Util/Mutable/MutableValue.cs        |    2 +-
 src/Lucene.Net/Util/OfflineSorter.cs               |    1 -
 src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs     |    1 -
 src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs  |    1 -
 src/Lucene.Net/Util/RamUsageEstimator.cs           |    6 +-
 src/Lucene.Net/Util/SPIClassIterator.cs            |    7 +-
 src/Lucene.Net/Util/StringHelper.cs                |    8 +-
 src/Lucene.Net/Util/UnicodeUtil.cs                 |    5 +-
 src/Lucene.Net/Util/VirtualMethod.cs               |    3 +-
 .../TestICUPostingsHighlighterRanking.cs           |    5 +-
 650 files changed, 5796 insertions(+), 37896 deletions(-)
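
Before the file-by-file diff, a minimal illustrative sketch (not code from the
commit) of the collection swap it applies. It assumes the J2N 2.0.0-beta-0001
collections referenced in build/Dependencies.props below expose the familiar
System.Collections.Generic surface, which is how the hunks below consume them
through the JCG alias:

    using System;
    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic; // alias used throughout the hunks below

    public static class J2NCollectionsSketch
    {
        public static void Main()
        {
            // Stands in for the removed Lucene.Net.Support.HashMap<K,V>;
            // created with an initial capacity, as in HyphenationTree below.
            var stoplist = new JCG.Dictionary<string, IList<object>>(23);
            stoplist["del"] = new List<object> { 0, 1, 0 };

            // Stands in for HashSet<string> where the hunks switch to
            // JCG.HashSet<string> (e.g. TypeTokenFilterFactory below).
            var stopTypes = new JCG.HashSet<string> { "<NUM>", "<EMAIL>" };

            // Stands in for SortedDictionary<string, string> where ordinal key
            // order is required (e.g. NormalizeCharMap below).
            var pendingPairs = new JCG.SortedDictionary<string, string>(StringComparer.Ordinal)
            {
                ["a"] = "b"
            };

            Console.WriteLine(stopTypes.Contains("<NUM>"));         // True
            Console.WriteLine(pendingPairs.Count + stoplist.Count); // 2
        }
    }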

diff --git a/.rat-excludes b/.rat-excludes
index 484d797..0e74d71 100644
--- a/.rat-excludes
+++ b/.rat-excludes
@@ -22,4 +22,5 @@ Sax/*
 JaspellTernarySearchTrie\.cs
 RectangularArrays\.cs
 LimitedConcurrencyLevelTaskScheduler\.cs
-Automaton/*
\ No newline at end of file
+Automaton/*
+ConcurrentHashSet\.cs
\ No newline at end of file
diff --git a/Directory.Build.targets b/Directory.Build.targets
index 774ba44..85ae211 100644
--- a/Directory.Build.targets
+++ b/Directory.Build.targets
@@ -29,8 +29,6 @@
 
     <DefineConstants>$(DefineConstants);FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR</DefineConstants>
     <DefineConstants>$(DefineConstants);FEATURE_CONDITIONALWEAKTABLE_ADDORUPDATE</DefineConstants>
-
-    <DefineConstants>$(DefineConstants);FEATURE_HASHSET_CAPACITY</DefineConstants>
     
   </PropertyGroup>
 
diff --git a/build/Dependencies.props b/build/Dependencies.props
index a8b01c3..1d1687a 100644
--- a/build/Dependencies.props
+++ b/build/Dependencies.props
@@ -32,19 +32,19 @@
         https://github.com/apache/lucene-solr/tree/31d7ec7bbfdcd2c4cc61d9d35e962165410b65fe/lucene/analysis/icu/src/data/utr30
         Just make sure they are adjusted to the right version of ICU/Lucene.
     <ICU4NPackageVersion>[60.1,60.2)</ICU4NPackageVersion> -->
-    <ICU4NPackageVersion>60.1.0-alpha.193</ICU4NPackageVersion>
+    <ICU4NPackageVersion>60.1.0-alpha.197</ICU4NPackageVersion>
     <ICU4NCollationPackageVersion>$(ICU4NPackageVersion)</ICU4NCollationPackageVersion>
     <ICU4NCurrencyDataPackageVersion>$(ICU4NPackageVersion)</ICU4NCurrencyDataPackageVersion>
     <ICU4NLanguageDataPackageVersion>$(ICU4NPackageVersion)</ICU4NLanguageDataPackageVersion>
     <ICU4NRegionDataPackageVersion>$(ICU4NPackageVersion)</ICU4NRegionDataPackageVersion>
     <ICU4NTransliteratorPackageVersion>$(ICU4NPackageVersion)</ICU4NTransliteratorPackageVersion>
-    <J2NPackageVersion>1.0.0-beta-0001</J2NPackageVersion>
+    <J2NPackageVersion>2.0.0-beta-0001</J2NPackageVersion>
     <MicrosoftAspNetCoreHttpAbstractionsPackageVersion>1.0.3</MicrosoftAspNetCoreHttpAbstractionsPackageVersion>
     <MicrosoftAspNetCoreTestHostPackageVersion>1.0.3</MicrosoftAspNetCoreTestHostPackageVersion>
     <MicrosoftCSharpPackageVersion>4.4.0</MicrosoftCSharpPackageVersion>
     <MicrosoftExtensionsDependencyModelPackageVersion>2.0.0</MicrosoftExtensionsDependencyModelPackageVersion>
     <MicrosoftNETTestSdkPackageVersion>16.2.0</MicrosoftNETTestSdkPackageVersion>
-    <MorfologikFsaPackageVersion>2.1.6-beta-0001</MorfologikFsaPackageVersion>
+    <MorfologikFsaPackageVersion>2.1.6-beta-0002</MorfologikFsaPackageVersion>
     <MorfologikPolishPackageVersion>$(MorfologikFsaPackageVersion)</MorfologikPolishPackageVersion>
     <MorfologikStemmingPackageVersion>$(MorfologikFsaPackageVersion)</MorfologikStemmingPackageVersion>
     <MSTestTestFrameworkPackageVersion>2.0.0</MSTestTestFrameworkPackageVersion>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizer.cs
index 84ccc30..fa5bfa1 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizer.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Core;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.Core;
 using Lucene.Net.Util;
 using System;
 using System.Globalization;
@@ -8,21 +8,21 @@ using System.IO;
 namespace Lucene.Net.Analysis.Ar
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Tokenizer that breaks text into runs of letters and diacritics.
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilter.cs
index f26e496..9c7c1c6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilter.cs
@@ -1,25 +1,25 @@
 using Lucene.Net.Analysis.TokenAttributes;
 using System;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Br
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A <see cref="TokenFilter"/> that applies <see cref="BrazilianStemmer"/>.
@@ -37,7 +37,7 @@ namespace Lucene.Net.Analysis.Br
         /// <see cref="BrazilianStemmer"/> in use by this filter.
         /// </summary>
         private BrazilianStemmer stemmer = new BrazilianStemmer();
-        private HashSet<string> exclusions = null; // LUCENENET TODO: This is odd. No way to set it at all, so it cannot possibly have any values.
+        private JCG.HashSet<string> exclusions = null; // LUCENENET TODO: This is odd. No way to set it at all, so it cannot possibly have any values.
         private readonly ICharTermAttribute termAtt;
         private readonly IKeywordAttribute keywordAttr;
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ca/CatalanAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ca/CatalanAnalyzer.cs
index 006f9e3..75dbe60 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ca/CatalanAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ca/CatalanAnalyzer.cs
@@ -3,7 +3,6 @@ using Lucene.Net.Analysis.Miscellaneous;
 using Lucene.Net.Analysis.Snowball;
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Tartarus.Snowball.Ext;
 using Lucene.Net.Util;
 using System;
@@ -48,7 +47,7 @@ namespace Lucene.Net.Analysis.Ca
 
         private static readonly CharArraySet DEFAULT_ARTICLES = CharArraySet.UnmodifiableSet(
 #pragma warning disable 612, 618
-            new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList("d", "l", "m", "n", "s", "t"), true));
+            new CharArraySet(LuceneVersion.LUCENE_CURRENT, new string[] { "d", "l", "m", "n", "s", "t" }, true));
 #pragma warning restore 612, 618
 
         /// <summary>
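
An illustrative aside, not part of the patch: the CatalanAnalyzer hunk above
shows a pattern repeated throughout this commit, where Lucene.Net.Support's
Arrays.AsList(...) wrapper is dropped in favor of a plain array initializer,
since a string[] already satisfies the collection parameter the callers expect
(IList<string>/ICollection<string>). A tiny sketch with only BCL types:

    using System;
    using System.Collections.Generic;

    internal static class ArrayAsListSketch
    {
        public static void Main()
        {
            // The array itself is an IList<string>; no wrapper needed.
            IList<string> articles = new string[] { "d", "l", "m", "n", "s", "t" };

            Console.WriteLine(articles.Count);          // 6
            Console.WriteLine(articles.Contains("d"));  // True
        }
    }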
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
index 4031285..ad8119b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
@@ -10,21 +10,21 @@ using System.IO;
 namespace Lucene.Net.Analysis.CharFilters
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A <see cref="CharFilter"/> that wraps another <see cref="TextReader"/> and attempts to strip out HTML constructs.
@@ -31393,8 +31393,8 @@ namespace Lucene.Net.Analysis.CharFilters
                                 {
                                     outputSegment = entitySegment;
                                     outputSegment.Clear();
-                                    if (codePoint >= Character.MIN_SURROGATE
-                                        && codePoint <= Character.MAX_SURROGATE)
+                                    if (codePoint >= Character.MinSurrogate
+                                        && codePoint <= Character.MaxSurrogate)
                                     {
                                         outputSegment.UnsafeWrite(REPLACEMENT_CHARACTER);
                                     }
@@ -31653,8 +31653,8 @@ namespace Lucene.Net.Analysis.CharFilters
                                 {
                                     outputSegment = entitySegment;
                                     outputSegment.Clear();
-                                    if (codePoint >= Character.MIN_SURROGATE
-                                        && codePoint <= Character.MAX_SURROGATE)
+                                    if (codePoint >= Character.MinSurrogate
+                                        && codePoint <= Character.MaxSurrogate)
                                     {
                                         outputSegment.UnsafeWrite(REPLACEMENT_CHARACTER);
                                     }
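
A brief aside, not from the patch: the HTMLStripCharFilter hunks above only
rename constants — the Java-style Character.MIN_SURROGATE/MAX_SURROGATE become
J2N's Character.MinSurrogate/MaxSurrogate via the new "using J2N;" directive —
while the surrogate range itself (U+D800..U+DFFF) is unchanged. A self-contained
sketch of the same bounds check, with the values written out as local constants
rather than the J2N names:

    using System;

    internal static class SurrogateCheckSketch
    {
        // UTF-16 surrogate code unit range mirrored by the J2N constants above.
        private const int MinSurrogate = 0xD800;
        private const int MaxSurrogate = 0xDFFF;

        private static bool IsSurrogate(int codePoint) =>
            codePoint >= MinSurrogate && codePoint <= MaxSurrogate;

        public static void Main()
        {
            Console.WriteLine(IsSurrogate(0xD83D)); // True  (high surrogate)
            Console.WriteLine(IsSurrogate(0x0041)); // False ('A')
        }
    }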
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
index ce116da..7d47f76 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
@@ -7,21 +7,21 @@ using System.Text.RegularExpressions;
 namespace Lucene.Net.Analysis.CharFilters
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Factory for <see cref="MappingCharFilter"/>. 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs
index 8183fb8..e50803e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.CharFilters
 {
@@ -83,7 +84,7 @@ namespace Lucene.Net.Analysis.CharFilters
         {
             // LUCENENET specific - we need to use StringComparer.Ordinal for the
             // sort order to correctly match Lucene, otherwise FST.Builder will throw Debug.Assert failures
-            private readonly IDictionary<string, string> pendingPairs = new SortedDictionary<string, string>(StringComparer.Ordinal);
+            private readonly IDictionary<string, string> pendingPairs = new JCG.SortedDictionary<string, string>(StringComparer.Ordinal);
 
             /// <summary>
             /// Records a replacement to be applied to the input
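
An illustrative note, not part of the patch: the NormalizeCharMap hunk above
keeps the LUCENENET comment explaining why the map is built with
StringComparer.Ordinal — the default string comparer is culture-sensitive, so
keys could sort differently from Lucene's code-point order and trip the FST
builder's assertions. A small sketch of the difference, using only BCL types:

    using System;
    using System.Linq;

    internal static class OrdinalOrderSketch
    {
        public static void Main()
        {
            var keys = new[] { "a", "B", "_x" };

            // Linguistic (culture-sensitive) ordering; result varies by culture.
            var cultural = keys.OrderBy(k => k, StringComparer.CurrentCulture);

            // Ordinal ordering by UTF-16 code unit: "B", "_x", "a".
            var ordinal = keys.OrderBy(k => k, StringComparer.Ordinal);

            Console.WriteLine(string.Join(", ", cultural));
            Console.WriteLine(string.Join(", ", ordinal));
        }
    }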
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilter.cs
index 647645d..85e565d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilter.cs
@@ -1,26 +1,26 @@
-using Lucene.Net.Analysis.Standard;
+using J2N;
+using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis.Cjk
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     // LUCENENET specific - converted constants from CJKBigramFilter
     // into a flags enum.
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizer.cs
index 9388afa..7bf92e2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizer.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 using System;
 using System.IO;
 using System.Text.RegularExpressions;
@@ -7,21 +7,21 @@ using System.Text.RegularExpressions;
 namespace Lucene.Net.Analysis.Cjk
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// CJKTokenizer is designed for Chinese, Japanese, and Korean languages.
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizer.cs
index 78e6750..b9b8ef0 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizer.cs
@@ -1,25 +1,24 @@
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using System.Globalization;
 
 namespace Lucene.Net.Analysis.Ckb
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Normalizes the Unicode representation of Sorani text.
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilter.cs
index 47ff4a5..d7a7780 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilter.cs
@@ -1,6 +1,5 @@
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Globalization;
@@ -8,21 +7,21 @@ using System.Globalization;
 namespace Lucene.Net.Analysis.Cn
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A <see cref="TokenFilter"/> with a stop word table.  
@@ -59,7 +58,7 @@ namespace Lucene.Net.Analysis.Cn
         public ChineseFilter(TokenStream @in)
             : base(@in)
         {
-            stopTable = new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList(STOP_WORDS), false);
+            stopTable = new CharArraySet(LuceneVersion.LUCENE_CURRENT, STOP_WORDS, false);
             termAtt = AddAttribute<ICharTermAttribute>();
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs
index 5e176af..9f16a66 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using J2N.Text;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -181,7 +181,7 @@ namespace Lucene.Net.Analysis.Compound
             /// Construct the compound token based on a slice of the current <see cref="CompoundWordTokenFilterBase.m_termAtt"/>. </summary>
             public CompoundToken(CompoundWordTokenFilterBase outerInstance, int offset, int length)
             {
-                this.txt = outerInstance.m_termAtt.SubSequence(offset, offset + length);
+                this.txt = outerInstance.m_termAtt.Subsequence(offset, length); // LUCENENET: Corrected 2nd Subsequence parameter
 
                 // offsets of the original word
                 int startOff = outerInstance.m_offsetAtt.StartOffset;
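
A clarifying aside, not from the patch: the "Corrected 2nd Subsequence
parameter" change above reflects that Java's subSequence(start, end) takes an
exclusive end index, while the J2N-style Subsequence(start, length) used here
takes a length, so "offset + length" had to become just "length". The names
below are hypothetical; the sketch only shows the argument conversion with
plain string slicing:

    using System;

    internal static class SubsequenceSketch
    {
        // Java-style slice: (start, end), end exclusive.
        private static string SliceByEnd(string s, int start, int end) =>
            s.Substring(start, end - start);

        // .NET-style slice: (start, length).
        private static string SliceByLength(string s, int start, int length) =>
            s.Substring(start, length);

        public static void Main()
        {
            const string word = "Kaffeemaschine";
            Console.WriteLine(SliceByEnd(word, 0, 6));     // Kaffee
            Console.WriteLine(SliceByLength(word, 0, 6));  // Kaffee
        }
    }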
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
index 3d43e34..a8959c4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
@@ -1,9 +1,9 @@
-using Lucene.Net.Support;
-using System;
+using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Compound.Hyphenation
 {
@@ -57,7 +57,7 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
 
         public HyphenationTree()
         {
-            m_stoplist = new HashMap<string, IList<object>>(23); // usually a small table
+            m_stoplist = new JCG.Dictionary<string, IList<object>>(23); // usually a small table
             m_classmap = new TernaryTree();
             m_vspace = new ByteVector();
             m_vspace.Alloc(1); // this reserves index 0, which we don't use
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizer.cs
index 4b45693..c15c327 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizer.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System.IO;
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizer.cs
index a3408b2..96d8958 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizer.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N;
 using Lucene.Net.Util;
 using System.IO;
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/StopAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/StopAnalyzer.cs
index e6dc000..6154be8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/StopAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/StopAnalyzer.cs
@@ -1,5 +1,4 @@
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Collections.Generic;
 using System.IO;
@@ -45,10 +44,10 @@ namespace Lucene.Net.Analysis.Core
 
         private static CharArraySet LoadEnglishStopWordsSet() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
         {
-            IList<string> stopWords = Arrays.AsList("a", "an", "and", "are", "as", "at", "be", 
-                "but", "by", "for", "if", "in", "into", "is", "it", "no", "not", "of", "on", 
-                "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", 
-                "to", "was", "will", "with");
+            IList<string> stopWords = new string[] { "a", "an", "and", "are", "as", "at", "be",
+                "but", "by", "for", "if", "in", "into", "is", "it", "no", "not", "of", "on",
+                "or", "such", "that", "the", "their", "then", "there", "these", "they", "this",
+                "to", "was", "will", "with" };
 #pragma warning disable 612, 618
             var stopSet = new CharArraySet(LuceneVersion.LUCENE_CURRENT, stopWords, false);
 #pragma warning restore 612, 618
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs
index 38c69ab..682ef12 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using System.Collections.Generic;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Core
 {
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Core
         private readonly bool useWhitelist;
         private readonly bool enablePositionIncrements;
         private readonly string stopTypesFiles;
-        private HashSet<string> stopTypes;
+        private JCG.HashSet<string> stopTypes;
 
         /// <summary>
         /// Creates a new <see cref="TypeTokenFilterFactory"/> </summary>
@@ -60,7 +60,7 @@ namespace Lucene.Net.Analysis.Core
             IList<string> files = SplitFileNames(stopTypesFiles);
             if (files.Count() > 0)
             {
-                stopTypes = new HashSet<string>();
+                stopTypes = new JCG.HashSet<string>();
                 foreach (string file in files)
                 {
                     IList<string> typesLines = GetLines(loader, file.Trim());
@@ -69,21 +69,9 @@ namespace Lucene.Net.Analysis.Core
             }
         }
 
-        public virtual bool EnablePositionIncrements
-        {
-            get
-            {
-                return enablePositionIncrements;
-            }
-        }
+        public virtual bool EnablePositionIncrements => enablePositionIncrements;
 
-        public virtual ICollection<string> StopTypes
-        {
-            get
-            {
-                return stopTypes;
-            }
-        }
+        public virtual ICollection<string> StopTypes => stopTypes;
 
         public override TokenStream Create(TokenStream input)
         {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizer.cs
index cee9568..8381dce 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizer.cs
@@ -1,7 +1,6 @@
-using System.IO;
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
+using System.IO;
 
 namespace Lucene.Net.Analysis.Core
 {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanAnalyzer.cs
index 00738f5..def3ff4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanAnalyzer.cs
@@ -1,10 +1,8 @@
-using Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Core;
+using Lucene.Net.Analysis.Core;
 using Lucene.Net.Analysis.Miscellaneous;
 using Lucene.Net.Analysis.Snowball;
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Tartarus.Snowball.Ext;
 using Lucene.Net.Util;
 using System;
@@ -94,7 +92,7 @@ namespace Lucene.Net.Analysis.De
         {
             /// @deprecated in 3.1, remove in Lucene 5.0 (index bw compat) 
             [Obsolete("in 3.1, remove in Lucene 5.0 (index bw compat)")]
-            internal static readonly CharArraySet DEFAULT_SET_30 = CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList(GERMAN_STOP_WORDS), false));
+            internal static readonly CharArraySet DEFAULT_SET_30 = CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, GERMAN_STOP_WORDS, false));
             internal static readonly CharArraySet DEFAULT_SET = LoadDefaultSet();
             private static CharArraySet LoadDefaultSet() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilter.cs
index 85f4bd3..1997ade 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilter.cs
@@ -1,26 +1,26 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis.El
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Normalizes token text to lower case, removes some Greek diacritics,
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemmer.cs
index 5c6a40a..e45a672 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemmer.cs
@@ -1,25 +1,24 @@
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis.El
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A stemmer for Greek words, according to: <c>Development of a Stemmer for the
@@ -256,7 +255,7 @@ namespace Lucene.Net.Analysis.El
         }
 
 #pragma warning disable 612, 618
-        private static readonly CharArraySet exc4 = new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList("θ", "δ", "ελ", "γαλ", "ν", "π", "ιδ", "παρ"), false);
+        private static readonly CharArraySet exc4 = new CharArraySet(LuceneVersion.LUCENE_CURRENT, new string[] { "θ", "δ", "ελ", "γαλ", "ν", "π", "ιδ", "παρ" }, false);
 #pragma warning restore 612, 618
 
         private int Rule4(char[] s, int len)
@@ -299,11 +298,11 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             new CharArraySet(LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("αλ", "αδ", "ενδ", "αμαν", "αμμοχαλ", "ηθ", "ανηθ", 
-                "αντιδ", "φυσ", "βρωμ", "γερ", "εξωδ", "καλπ", "καλλιν", "καταδ", 
-                "μουλ", "μπαν", "μπαγιατ", "μπολ", "μποσ", "νιτ", "ξικ", "συνομηλ", 
-                "πετσ", "πιτσ", "πικαντ", "πλιατσ", "ποστελν", "πρωτοδ", "σερτ", 
-                "συναδ", "τσαμ", "υποδ", "φιλον", "φυλοδ", "χασ"), false);
+            new string[] { "αλ", "αδ", "ενδ", "αμαν", "αμμοχαλ", "ηθ", "ανηθ",
+                "αντιδ", "φυσ", "βρωμ", "γερ", "εξωδ", "καλπ", "καλλιν", "καταδ",
+                "μουλ", "μπαν", "μπαγιατ", "μπολ", "μποσ", "νιτ", "ξικ", "συνομηλ",
+                "πετσ", "πιτσ", "πικαντ", "πλιατσ", "ποστελν", "πρωτοδ", "σερτ",
+                "συναδ", "τσαμ", "υποδ", "φιλον", "φυλοδ", "χασ" }, false);
 
         private int Rule6(char[] s, int len)
         {
@@ -335,8 +334,8 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             new CharArraySet(LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("αναπ", "αποθ", "αποκ", "αποστ", "βουβ", "ξεθ", "ουλ", 
-                "πεθ", "πικρ", "ποτ", "σιχ", "χ"), false);
+            new string[] { "αναπ", "αποθ", "αποκ", "αποστ", "βουβ", "ξεθ", "ουλ",
+                "πεθ", "πικρ", "ποτ", "σιχ", "χ" }, false);
 
         private int Rule7(char[] s, int len)
         {
@@ -376,25 +375,25 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("τρ", "τσ"), false);
+            new string[] { "τρ", "τσ" }, false);
 
         private static readonly CharArraySet exc8b = new CharArraySet(
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("βετερ", "βουλκ", "βραχμ", "γ", "δραδουμ", "θ", "καλπουζ", 
-                "καστελ", "κορμορ", "λαοπλ", "μωαμεθ", "μ", "μουσουλμ", "ν", "ουλ", 
-                "π", "πελεκ", "πλ", "πολισ", "πορτολ", "σαρακατσ", "σουλτ", 
-                "τσαρλατ", "ορφ", "τσιγγ", "τσοπ", "φωτοστεφ", "χ", "ψυχοπλ", "αγ", 
-                "ορφ", "γαλ", "γερ", "δεκ", "διπλ", "αμερικαν", "ουρ", "πιθ", 
-                "πουριτ", "σ", "ζωντ", "ικ", "καστ", "κοπ", "λιχ", "λουθηρ", "μαιντ", 
-                "μελ", "σιγ", "σπ", "στεγ", "τραγ", "τσαγ", "φ", "ερ", "αδαπ", 
-                "αθιγγ", "αμηχ", "ανικ", "ανοργ", "απηγ", "απιθ", "ατσιγγ", "βασ", 
-                "βασκ", "βαθυγαλ", "βιομηχ", "βραχυκ", "διατ", "διαφ", "ενοργ", 
-                "θυσ", "καπνοβιομηχ", "καταγαλ", "κλιβ", "κοιλαρφ", "λιβ", 
-                "μεγλοβιομηχ", "μικροβιομηχ", "νταβ", "ξηροκλιβ", "ολιγοδαμ", 
-                "ολογαλ", "πενταρφ", "περηφ", "περιτρ", "πλατ", "πολυδαπ", "πολυμηχ", 
-                "στεφ", "ταβ", "τετ", "υπερηφ", "υποκοπ", "χαμηλοδαπ", "ψηλοταβ"), false);
+            new string[] { "βετερ", "βουλκ", "βραχμ", "γ", "δραδουμ", "θ", "καλπουζ",
+                "καστελ", "κορμορ", "λαοπλ", "μωαμεθ", "μ", "μουσουλμ", "ν", "ουλ",
+                "π", "πελεκ", "πλ", "πολισ", "πορτολ", "σαρακατσ", "σουλτ",
+                "τσαρλατ", "ορφ", "τσιγγ", "τσοπ", "φωτοστεφ", "χ", "ψυχοπλ", "αγ",
+                "ορφ", "γαλ", "γερ", "δεκ", "διπλ", "αμερικαν", "ουρ", "πιθ",
+                "πουριτ", "σ", "ζωντ", "ικ", "καστ", "κοπ", "λιχ", "λουθηρ", "μαιντ",
+                "μελ", "σιγ", "σπ", "στεγ", "τραγ", "τσαγ", "φ", "ερ", "αδαπ",
+                "αθιγγ", "αμηχ", "ανικ", "ανοργ", "απηγ", "απιθ", "ατσιγγ", "βασ",
+                "βασκ", "βαθυγαλ", "βιομηχ", "βραχυκ", "διατ", "διαφ", "ενοργ",
+                "θυσ", "καπνοβιομηχ", "καταγαλ", "κλιβ", "κοιλαρφ", "λιβ",
+                "μεγλοβιομηχ", "μικροβιομηχ", "νταβ", "ξηροκλιβ", "ολιγοδαμ",
+                "ολογαλ", "πενταρφ", "περηφ", "περιτρ", "πλατ", "πολυδαπ", "πολυμηχ",
+                "στεφ", "ταβ", "τετ", "υπερηφ", "υποκοπ", "χαμηλοδαπ", "ψηλοταβ" }, false);
 
         private int Rule8(char[] s, int len)
         {
@@ -454,9 +453,9 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("αβαρ", "βεν", "εναρ", "αβρ", "αδ", "αθ", "αν", "απλ", 
-                "βαρον", "ντρ", "σκ", "κοπ", "μπορ", "νιφ", "παγ", "παρακαλ", "σερπ", 
-                "σκελ", "συρφ", "τοκ", "υ", "δ", "εμ", "θαρρ", "θ"), false);
+            new string[] { "αβαρ", "βεν", "εναρ", "αβρ", "αδ", "αθ", "αν", "απλ",
+                "βαρον", "ντρ", "σκ", "κοπ", "μπορ", "νιφ", "παγ", "παρακαλ", "σερπ",
+                "σκελ", "συρφ", "τοκ", "υ", "δ", "εμ", "θαρρ", "θ" }, false);
 
         private int Rule9(char[] s, int len)
         {
@@ -557,10 +556,10 @@ namespace Lucene.Net.Analysis.El
 
 #pragma warning disable 612, 618
         private static readonly CharArraySet exc12a = new CharArraySet(LuceneVersion.LUCENE_CURRENT, 
-            Arrays.AsList("π", "απ", "συμπ", "ασυμπ", "ακαταπ", "αμεταμφ"), false);
+            new string[] { "π", "απ", "συμπ", "ασυμπ", "ακαταπ", "αμεταμφ" }, false);
 
         private static readonly CharArraySet exc12b = new CharArraySet(LuceneVersion.LUCENE_CURRENT, 
-            Arrays.AsList("αλ", "αρ", "εκτελ", "ζ", "μ", "ξ", "παρακαλ", "αρ", "προ", "νισ"), false);
+            new string[] { "αλ", "αρ", "εκτελ", "ζ", "μ", "ξ", "παρακαλ", "αρ", "προ", "νισ" }, false);
 #pragma warning restore 612, 618
 
         private int Rule12(char[] s, int len)
@@ -590,7 +589,7 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("διαθ", "θ", "παρακαταθ", "προσθ", "συνθ"), false);
+            new string[] { "διαθ", "θ", "παρακαταθ", "προσθ", "συνθ" }, false);
 
         private int Rule13(char[] s, int len)
         {
@@ -634,9 +633,9 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("φαρμακ", "χαδ", "αγκ", "αναρρ", "βρομ", "εκλιπ", "λαμπιδ", 
-                "λεχ", "μ", "πατ", "ρ", "λ", "μεδ", "μεσαζ", "υποτειν", "αμ", "αιθ", 
-                "ανηκ", "δεσποζ", "ενδιαφερ", "δε", "δευτερευ", "καθαρευ", "πλε", "τσα"), false);
+            new string[] { "φαρμακ", "χαδ", "αγκ", "αναρρ", "βρομ", "εκλιπ", "λαμπιδ",
+                "λεχ", "μ", "πατ", "ρ", "λ", "μεδ", "μεσαζ", "υποτειν", "αμ", "αιθ",
+                "ανηκ", "δεσποζ", "ενδιαφερ", "δε", "δευτερευ", "καθαρευ", "πλε", "τσα" }, false);
 
         private int Rule14(char[] s, int len)
         {
@@ -679,18 +678,18 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("αβαστ", "πολυφ", "αδηφ", "παμφ", "ρ", "ασπ", "αφ", "αμαλ", 
-                "αμαλλι", "ανυστ", "απερ", "ασπαρ", "αχαρ", "δερβεν", "δροσοπ", 
-                "ξεφ", "νεοπ", "νομοτ", "ολοπ", "ομοτ", "προστ", "προσωποπ", "συμπ", 
-                "συντ", "τ", "υποτ", "χαρ", "αειπ", "αιμοστ", "ανυπ", "αποτ", 
-                "αρτιπ", "διατ", "εν", "επιτ", "κροκαλοπ", "σιδηροπ", "λ", "ναυ", 
-                "ουλαμ", "ουρ", "π", "τρ", "μ"), false);
+            new string[] { "αβαστ", "πολυφ", "αδηφ", "παμφ", "ρ", "ασπ", "αφ", "αμαλ",
+                "αμαλλι", "ανυστ", "απερ", "ασπαρ", "αχαρ", "δερβεν", "δροσοπ",
+                "ξεφ", "νεοπ", "νομοτ", "ολοπ", "ομοτ", "προστ", "προσωποπ", "συμπ",
+                "συντ", "τ", "υποτ", "χαρ", "αειπ", "αιμοστ", "ανυπ", "αποτ",
+                "αρτιπ", "διατ", "εν", "επιτ", "κροκαλοπ", "σιδηροπ", "λ", "ναυ",
+                "ουλαμ", "ουρ", "π", "τρ", "μ" }, false);
 
         private static readonly CharArraySet exc15b = new CharArraySet(
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("ψοφ", "ναυλοχ"), false);
+            new string[] { "ψοφ", "ναυλοχ" }, false);
 
         private int Rule15(char[] s, int len)
         {
@@ -735,7 +734,7 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("ν", "χερσον", "δωδεκαν", "ερημον", "μεγαλον", "επταν"), false);
+            new string[] { "ν", "χερσον", "δωδεκαν", "ερημον", "μεγαλον", "επταν" }, false);
 
         private int Rule16(char[] s, int len)
         {
@@ -763,7 +762,7 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("ασβ", "σβ", "αχρ", "χρ", "απλ", "αειμν", "δυσχρ", "ευχρ", "κοινοχρ", "παλιμψ"), false);
+            new string[] { "ασβ", "σβ", "αχρ", "χρ", "απλ", "αειμν", "δυσχρ", "ευχρ", "κοινοχρ", "παλιμψ" }, false);
 
         private int Rule17(char[] s, int len)
         {
@@ -783,7 +782,7 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("ν", "ρ", "σπι", "στραβομουτσ", "κακομουτσ", "εξων"), false);
+            new string[] { "ν", "ρ", "σπι", "στραβομουτσ", "κακομουτσ", "εξων" }, false);
 
         private int Rule18(char[] s, int len)
         {
@@ -814,7 +813,7 @@ namespace Lucene.Net.Analysis.El
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("παρασουσ", "φ", "χ", "ωριοπλ", "αζ", "αλλοσουσ", "ασουσ"), false);
+            new string[] { "παρασουσ", "φ", "χ", "ωριοπλ", "αζ", "αλλοσουσ", "ασουσ" }, false);
 
         private int Rule19(char[] s, int len)
         {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/KStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/KStemmer.cs
index 3647dd8..0595379 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/En/KStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/En/KStemmer.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System;
 
@@ -820,8 +820,8 @@ namespace Lucene.Net.Analysis.En
 
                 DictEntry entry = WordInDict();
                 if (entry != null) 
-		        {
-			        if (!entry.exception) 
+                {
+                    if (!entry.exception) 
                     {
                         // if it's in the dictionary and
                         // not an exception
@@ -1101,9 +1101,9 @@ namespace Lucene.Net.Analysis.En
                     return;
                 }
                 word.Length = j; /*
-	                          * try removing -e/ance altogether
-	                          * (disappearance/disappear)
-	                          */
+                              * try removing -e/ance altogether
+                              * (disappearance/disappear)
+                              */
                 k = j - 1;
                 if (Lookup())
                 {
@@ -1376,9 +1376,9 @@ namespace Lucene.Net.Analysis.En
                 word.UnsafeWrite('e');
                 k = j + 1;
                 if (Lookup()) /*
-	                     * remove -ition and add `e', and check against the
-	                     * dictionary
-	                     */
+                         * remove -ition and add `e', and check against the
+                         * dictionary
+                         */
                 {
                     return; // (e.g., definition->define, opposition->oppose)
                 }
@@ -1401,9 +1401,9 @@ namespace Lucene.Net.Analysis.En
 
                 word.Length = j + 1;
                 word.UnsafeWrite('e'); /*
-	                              * remove -ation and add `e', and check against the
-	                              * dictionary
-	                              */
+                                  * remove -ation and add `e', and check against the
+                                  * dictionary
+                                  */
                 k = j + 1;
                 if (Lookup())
                 {
@@ -1411,9 +1411,9 @@ namespace Lucene.Net.Analysis.En
                 }
 
                 word.Length = j + 1; /*
-	                             * just remove -ation (resignation->resign) and
-	                             * check dictionary
-	                             */
+                                 * just remove -ation (resignation->resign) and
+                                 * check dictionary
+                                 */
                 k = j;
                 if (Lookup())
                 {
@@ -1439,9 +1439,9 @@ namespace Lucene.Net.Analysis.En
                 word.UnsafeWrite('y');
                 k = j + 1;
                 if (Lookup()) /*
-	                     * remove -ication and add `y', and check against the
-	                     * dictionary
-	                     */
+                         * remove -ication and add `y', and check against the
+                         * dictionary
+                         */
                 {
                     return; // (e.g., amplification -> amplify)
                 }
@@ -1606,14 +1606,14 @@ namespace Lucene.Net.Analysis.En
                 }
 
                 if ((j > 0) && (word[j - 1] == 'a') && (word[j] == 'l')) /*
-	                                                                              * always
-	                                                                              * convert
-	                                                                              * -
-	                                                                              * ally
-	                                                                              * to
-	                                                                              * -
-	                                                                              * al
-	                                                                              */
+                                                                                  * always
+                                                                                  * convert
+                                                                                  * -
+                                                                                  * ally
+                                                                                  * to
+                                                                                  * -
+                                                                                  * al
+                                                                                  */
                 {
                     return;
                 }
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemmer.cs
index 8b472af..40da730 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemmer.cs
@@ -1,54 +1,52 @@
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
-using System.Globalization;
-using System.IO;
 using System.Diagnostics.CodeAnalysis;
 
 namespace Lucene.Net.Analysis.En
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /*
-	
-	   Porter stemmer in .NET. The original paper is in
-	
-	       Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
-	       no. 3, pp 130-137,
-	
-	   See also http://www.tartarus.org/~martin/PorterStemmer/index.html
-	
-	   Bug 1 (reported by Gonzalo Parra 16/10/99) fixed as marked below.
-	   Tthe words 'aed', 'eed', 'oed' leave k at 'a' for step 3, and b[k-1]
-	   is then out outside the bounds of b.
-	
-	   Similarly,
-	
-	   Bug 2 (reported by Steve Dyrdahl 22/2/00) fixed as marked below.
-	   'ion' by itself leaves j = -1 in the test for 'ion' in step 5, and
-	   b[j] is then outside the bounds of b.
-	
-	   Release 3.
-	
-	   [ This version is derived from Release 3, modified by Brian Goetz to
-	     optimize for fewer object creations.  ]
-	
-	*/
+    
+       Porter stemmer in .NET. The original paper is in
+    
+           Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
+           no. 3, pp 130-137,
+    
+       See also http://www.tartarus.org/~martin/PorterStemmer/index.html
+    
+       Bug 1 (reported by Gonzalo Parra 16/10/99) fixed as marked below.
+       The words 'aed', 'eed', 'oed' leave k at 'a' for step 3, and b[k-1]
+       is then outside the bounds of b.
+    
+       Similarly,
+    
+       Bug 2 (reported by Steve Dyrdahl 22/2/00) fixed as marked below.
+       'ion' by itself leaves j = -1 in the test for 'ion' in step 5, and
+       b[j] is then outside the bounds of b.
+    
+       Release 3.
+    
+       [ This version is derived from Release 3, modified by Brian Goetz to
+         optimize for fewer object creations.  ]
+    
+    */
 
     /// 
     /// <summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchAnalyzer.cs
index 7b9ebd8..b3f1d60 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchAnalyzer.cs
@@ -3,7 +3,6 @@ using Lucene.Net.Analysis.Miscellaneous;
 using Lucene.Net.Analysis.Snowball;
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.IO;
@@ -12,21 +11,21 @@ using System.Text;
 namespace Lucene.Net.Analysis.Fr
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// <see cref="Analyzer"/> for French language. 
@@ -115,7 +114,7 @@ namespace Lucene.Net.Analysis.Fr
         {
             /// @deprecated (3.1) remove this in Lucene 5.0, index bw compat 
             [Obsolete("(3.1) remove this in Lucene 5.0, index bw compat")]
-            internal static readonly CharArraySet DEFAULT_STOP_SET_30 = CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList(FRENCH_STOP_WORDS), false));
+            internal static readonly CharArraySet DEFAULT_STOP_SET_30 = CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, FRENCH_STOP_WORDS, false));
             internal static readonly CharArraySet DEFAULT_STOP_SET = LoadDefaultStopSet();
             private static CharArraySet LoadDefaultStopSet() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
             {
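
    The change above passes the string[] of stop words straight to CharArraySet, dropping the
    Arrays.AsList bridge from Lucene.Net.Support. A minimal sketch of that pattern, assuming the
    CharArraySet(LuceneVersion, collection, bool) constructor shown in this diff and using made-up
    sample words:

    using Lucene.Net.Analysis.Util;
    using Lucene.Net.Util;

    internal static class StopSetSketch
    {
        // Hypothetical sample words; any string[] works because the constructor
        // shown in this diff accepts a collection of strings directly.
        private static readonly string[] SAMPLE_STOP_WORDS = { "le", "la", "les" };

        // UnmodifiableSet keeps callers from mutating the shared default set.
        // LUCENE_CURRENT is flagged obsolete in this codebase; production code
        // passes its matchVersion or suppresses the warning with #pragma 612, 618.
        internal static readonly CharArraySet DEFAULT_STOP_SET =
            CharArraySet.UnmodifiableSet(
                new CharArraySet(LuceneVersion.LUCENE_CURRENT, SAMPLE_STOP_WORDS, false));
    }
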
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishAnalyzer.cs
index ed3c48d..a045269 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishAnalyzer.cs
@@ -3,7 +3,6 @@ using Lucene.Net.Analysis.Miscellaneous;
 using Lucene.Net.Analysis.Snowball;
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Tartarus.Snowball.Ext;
 using Lucene.Net.Util;
 using System;
@@ -12,21 +11,21 @@ using System.IO;
 namespace Lucene.Net.Analysis.Ga
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// <see cref="Analyzer"/> for Irish.
@@ -43,7 +42,7 @@ namespace Lucene.Net.Analysis.Ga
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("d", "m", "b"), true));
+            new string[] { "d", "m", "b" }, true));
 
         /// <summary>
         /// When StandardTokenizer splits t‑athair into {t, athair}, we don't
@@ -54,7 +53,7 @@ namespace Lucene.Net.Analysis.Ga
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("h", "n", "t"), true));
+            new string[] { "h", "n", "t" }, true));
 
         /// <summary>
         /// Returns an unmodifiable instance of the default stop words set. </summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilter.cs
index 74dd9d8..7caa55b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilter.cs
@@ -1,24 +1,24 @@
-using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 
 namespace Lucene.Net.Analysis.Ga
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Normalises token text to lower case, handling t-prothesis
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
index 49b04c2..20dcfa7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
@@ -1,9 +1,12 @@
-using Lucene.Net.Store;
+using J2N.Collections.Generic.Extensions;
+using J2N.Text;
+using Lucene.Net.Store;
 using Lucene.Net.Support;
 using Lucene.Net.Support.IO;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
 using Lucene.Net.Util.Fst;
+using J2N;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -11,25 +14,26 @@ using System.Globalization;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Hunspell
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// In-memory structure for the dictionary (.dic) and affix (.aff)
@@ -262,17 +266,19 @@ namespace Lucene.Net.Analysis.Hunspell
         /// <exception cref="IOException"> Can be thrown while reading from the InputStream </exception>
         private void ReadAffixFile(Stream affixStream, Encoding decoder)
         {
-            SortedDictionary<string, IList<char?>> prefixes = new SortedDictionary<string, IList<char?>>(StringComparer.Ordinal);
-            SortedDictionary<string, IList<char?>> suffixes = new SortedDictionary<string, IList<char?>>(StringComparer.Ordinal);
-            IDictionary<string, int?> seenPatterns = new Dictionary<string, int?>();
+            JCG.SortedDictionary<string, IList<char?>> prefixes = new JCG.SortedDictionary<string, IList<char?>>(StringComparer.Ordinal);
+            JCG.SortedDictionary<string, IList<char?>> suffixes = new JCG.SortedDictionary<string, IList<char?>>(StringComparer.Ordinal);
+            IDictionary<string, int?> seenPatterns = new JCG.Dictionary<string, int?>();
 
             // zero condition -> 0 ord
             seenPatterns[".*"] = 0;
             patterns.Add(null);
 
             // zero strip -> 0 ord
-            IDictionary<string, int?> seenStrips = new LinkedHashMap<string, int?>();
-            seenStrips[""] = 0;
+            IDictionary<string, int?> seenStrips = new JCG.LinkedDictionary<string, int?>
+            {
+                [""] = 0
+            };
 
             var reader = new StreamReader(affixStream, decoder);
             string line = null;
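
    The seenStrips map above moves from LinkedHashMap to J2N's LinkedDictionary, seeded through a
    collection initializer. Like its Java counterpart, LinkedDictionary is expected to enumerate
    entries in insertion order, which is what keeps the zero strip at ordinal 0. A small sketch
    with hypothetical strip keys:

    using System;
    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class LinkedDictionarySketch
    {
        public static void Main()
        {
            // The "" -> 0 entry mirrors the zero-strip seeding in the diff;
            // "re" and "un" are made-up strips added afterwards.
            IDictionary<string, int?> seenStrips = new JCG.LinkedDictionary<string, int?>
            {
                [""] = 0,
                ["re"] = 1,
                ["un"] = 2
            };

            foreach (KeyValuePair<string, int?> kvp in seenStrips)
            {
                // Expected output order: "" -> 0, re -> 1, un -> 2 (insertion order).
                Console.WriteLine($"'{kvp.Key}' -> {kvp.Value}");
            }
        }
    }
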
@@ -372,7 +378,7 @@ namespace Lucene.Net.Analysis.Hunspell
             stripOffsets[currentIndex] = currentOffset;
         }
 
-        private FST<Int32sRef> AffixFST(SortedDictionary<string, IList<char?>> affixes)
+        private FST<Int32sRef> AffixFST(JCG.SortedDictionary<string, IList<char?>> affixes)
         {
             Int32SequenceOutputs outputs = Int32SequenceOutputs.Singleton;
             Builder<Int32sRef> builder = new Builder<Int32sRef>(FST.INPUT_TYPE.BYTE4, outputs);
@@ -395,7 +401,7 @@ namespace Lucene.Net.Analysis.Hunspell
         /// <summary>
         /// Parses a specific affix rule putting the result into the provided affix map
         /// </summary>
-        /// <param name="affixes"> <see cref="SortedDictionary{TKey, TValue}"/> where the result of the parsing will be put </param>
+        /// <param name="affixes"> <see cref="JCG.SortedDictionary{TKey, TValue}"/> where the result of the parsing will be put </param>
         /// <param name="header"> Header line of the affix rule </param>
         /// <param name="reader"> <see cref="TextReader"/> to read the content of the rule from </param>
         /// <param name="conditionPattern"> <see cref="string.Format(string, object[])"/> pattern to be used to generate the condition regex
@@ -403,7 +409,7 @@ namespace Lucene.Net.Analysis.Hunspell
         /// <param name="seenPatterns"> map from condition -> index of patterns, for deduplication. </param>
         /// <param name="seenStrips"></param>
         /// <exception cref="IOException"> Can be thrown while reading the rule </exception>
-        private void ParseAffix(SortedDictionary<string, IList<char?>> affixes, string header, TextReader reader, string conditionPattern, IDictionary<string, int?> seenPatterns, IDictionary<string, int?> seenStrips)
+        private void ParseAffix(JCG.SortedDictionary<string, IList<char?>> affixes, string header, TextReader reader, string conditionPattern, IDictionary<string, int?> seenPatterns, IDictionary<string, int?> seenStrips)
         {
             BytesRef scratch = new BytesRef();
             StringBuilder sb = new StringBuilder();
@@ -545,7 +551,7 @@ namespace Lucene.Net.Analysis.Hunspell
 
         private FST<CharsRef> ParseConversions(TextReader reader, int num)
         {
-            IDictionary<string, string> mappings = new SortedDictionary<string, string>(StringComparer.Ordinal);
+            IDictionary<string, string> mappings = new JCG.SortedDictionary<string, string>(StringComparer.Ordinal);
 
             for (int i = 0; i < num; i++)
             {
@@ -623,12 +629,11 @@ namespace Lucene.Net.Analysis.Hunspell
         internal static readonly IDictionary<string, string> CHARSET_ALIASES = LoadCharsetAliases();
         private static IDictionary<string, string> LoadCharsetAliases() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
         {
-            IDictionary<string, string> m = new Dictionary<string, string>
+            return new Dictionary<string, string>
             {
                 ["microsoft-cp1251"] = "windows-1251",
                 ["TIS620-2533"] = "TIS-620"
-            };
-            return Collections.UnmodifiableMap(m);
+            }.AsReadOnly();
         }
 
         /// <summary>
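
    Collections.UnmodifiableMap from Lucene.Net.Support gives way to the AsReadOnly() extension
    pulled in via J2N.Collections.Generic.Extensions at the top of this file. A hedged sketch of
    the shape used above; mutating the returned view is expected to throw NotSupportedException,
    matching the old UnmodifiableMap contract:

    using System.Collections.Generic;
    using J2N.Collections.Generic.Extensions;

    internal static class CharsetAliasSketch
    {
        internal static readonly IDictionary<string, string> CHARSET_ALIASES = LoadCharsetAliases();

        // Building the map inline and returning a read-only wrapper avoids a
        // static constructor, per the LUCENENET note referenced in the diff.
        private static IDictionary<string, string> LoadCharsetAliases()
        {
            return new Dictionary<string, string>
            {
                ["microsoft-cp1251"] = "windows-1251",
                ["TIS620-2533"] = "TIS-620"
            }.AsReadOnly();
        }
    }
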
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
index eef53a4..31bb03f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/ISO8859_14Decoder.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/ISO8859_14Decoder.cs
index c281600..022cf5d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/ISO8859_14Decoder.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/ISO8859_14Decoder.cs
@@ -5,21 +5,21 @@ using System.Text;
 namespace Lucene.Net.Analysis.Hunspell
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     // LUCENENET NOTE: This class was refactored from its Java counterpart.
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/In/IndicTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/In/IndicTokenizer.cs
index b0b8b59..e5fe4d6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/In/IndicTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/In/IndicTokenizer.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System;
 using System.Globalization;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianAnalyzer.cs
index 8419004..b64c2ae 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianAnalyzer.cs
@@ -3,7 +3,6 @@ using Lucene.Net.Analysis.Miscellaneous;
 using Lucene.Net.Analysis.Snowball;
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Tartarus.Snowball.Ext;
 using Lucene.Net.Util;
 using System;
@@ -13,21 +12,21 @@ using System.Text;
 namespace Lucene.Net.Analysis.It
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// <see cref="Analyzer"/> for Italian.
@@ -52,8 +51,8 @@ namespace Lucene.Net.Analysis.It
 #pragma warning disable 612, 618
             LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-            Arrays.AsList("c", "l", "all", "dall", "dell", "nell", "sull", "coll", "pell", "gl", "agl", 
-                "dagl", "degl", "negl", "sugl", "un", "m", "t", "s", "v", "d"), true));
+            new string[] { "c", "l", "all", "dall", "dell", "nell", "sull", "coll", "pell", "gl", "agl",
+                "dagl", "degl", "negl", "sugl", "un", "m", "t", "s", "v", "d" }, true));
 
         /// <summary>
         /// Returns an unmodifiable instance of the default stop words set. </summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilter.cs
index a5f2085..8f0e73f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilter.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzer.cs
index ac4d888..3efb94f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzer.cs
@@ -1,7 +1,6 @@
 using Lucene.Net.Analysis.Core;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.IO;
@@ -68,9 +67,9 @@ namespace Lucene.Net.Analysis.Miscellaneous
         /// <c>"\\s+"</c>; Divides text at whitespaces (Character.isWhitespace(c)) </summary>
         public static readonly Regex WHITESPACE_PATTERN = new Regex("\\s+", RegexOptions.Compiled);
 
-        private static readonly CharArraySet EXTENDED_ENGLISH_STOP_WORDS = 
-            CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, 
-                Arrays.AsList(
+        private static readonly CharArraySet EXTENDED_ENGLISH_STOP_WORDS =
+            CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT,
+                new string[] {
                     "a", "about", "above", "across", "adj", "after", "afterwards",
                     "again", "against", "albeit", "all", "almost", "alone", "along",
                     "already", "also", "although", "always", "among", "amongst", "an",
@@ -112,8 +111,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
                     "with", "within", "without", "would", "xsubj", "xcal", "xauthor",
                     "xother ", "xnote", "yet", "you", "your", "yours", "yourself",
                     "yourselves"
-                
-                    ), true));
+                    }, true));
 
         /// <summary>
         /// A lower-casing word analyzer with English stop words (can be shared
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PerFieldAnalyzerWrapper.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PerFieldAnalyzerWrapper.cs
index 8a47298..61a3c72 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PerFieldAnalyzerWrapper.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PerFieldAnalyzerWrapper.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Support;
-using System.Collections.Generic;
+using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
@@ -75,15 +75,15 @@ namespace Lucene.Net.Analysis.Miscellaneous
         ///         <description>Use when sorted keys are required. <c>null</c> keys are not supported.</description>
         ///     </item>
         ///     <item>
-        ///         <term><see cref="HashMap{TKey, TValue}"/></term>
+        ///         <term><see cref="JCG.Dictionary{TKey, TValue}"/></term>
         ///         <description>Similar behavior as <see cref="Dictionary{TKey, TValue}"/>. <c>null</c> keys are supported.</description>
         ///     </item>
         ///     <item>
-        ///         <term><see cref="TreeDictionary{TKey, TValue}"/></term>
+        ///         <term><see cref="JCG.SortedDictionary{TKey, TValue}"/></term>
         ///         <description>Use when sorted keys are required. <c>null</c> keys are supported.</description>
         ///     </item>
         ///     <item>
-        ///         <term><see cref="LinkedHashMap{TKey, TValue}"/></term>
+        ///         <term><see cref="JCG.LinkedDictionary{TKey, TValue}"/></term>
         ///         <description>Use when insertion order must be preserved (<see cref="Dictionary{TKey, TValue}"/> preserves insertion
         ///             order only until items are removed). <c>null</c> keys are supported.</description>
         ///     </item>
@@ -98,7 +98,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             : base(PER_FIELD_REUSE_STRATEGY)
         {
             this.defaultAnalyzer = defaultAnalyzer;
-            this.fieldAnalyzers = fieldAnalyzers ?? new HashMap<string, Analyzer>(); // LUCENENET-615: Must support nullable keys
+            this.fieldAnalyzers = fieldAnalyzers ?? new JCG.Dictionary<string, Analyzer>(); // LUCENENET-615: Must support nullable keys
         }
 
         protected override Analyzer GetWrappedAnalyzer(string fieldName)
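
    The fallback above uses J2N's Dictionary because the wrapper has to tolerate a null field-name
    key (LUCENENET-615), which System.Collections.Generic.Dictionary rejects with
    ArgumentNullException. A minimal sketch of that difference, with hypothetical field and
    analyzer names kept as plain strings:

    using System;
    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class NullKeySketch
    {
        public static void Main()
        {
            // JCG.Dictionary accepts a null key; the BCL Dictionary would throw here.
            IDictionary<string, string> fieldAnalyzers = new JCG.Dictionary<string, string>
            {
                ["title"] = "KeywordAnalyzer", // hypothetical field -> analyzer name
                [null] = "StandardAnalyzer"    // null key acts as a catch-all entry
            };

            Console.WriteLine(fieldAnalyzers[null]); // StandardAnalyzer

            // var bcl = new Dictionary<string, string> { [null] = "x" };
            // ...would throw ArgumentNullException at runtime.
        }
    }
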
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs
index 8356eed..e6421a8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System.Collections.Generic;
@@ -7,21 +7,21 @@ using System.Collections.Generic;
 namespace Lucene.Net.Analysis.Miscellaneous
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Provides the ability to override any <see cref="KeywordAttribute"/> aware stemmer
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
index bbf1eed..76035e7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
@@ -6,25 +6,26 @@ using System.Text.RegularExpressions;
 using Lucene.Net.Analysis.Util;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Factory for <see cref="WordDelimiterFilter"/>.
@@ -143,7 +144,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         // parses a list of MappingCharFilter style rules into a custom byte[] type table
         private byte[] ParseTypes(IList<string> rules)
         {
-            IDictionary<char, byte> typeMap = new SortedDictionary<char, byte>();
+            IDictionary<char, byte> typeMap = new JCG.SortedDictionary<char, byte>();
             foreach (string rule in rules)
             {
                 Match m = typePattern.Match(rule);
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43EdgeNGramTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43EdgeNGramTokenizer.cs
index 4dadbed..208b3da 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43EdgeNGramTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43EdgeNGramTokenizer.cs
@@ -274,7 +274,7 @@ namespace Lucene.Net.Analysis.NGram
             // grab gramSize chars from front or back
             int start = side == Side.FRONT ? 0 : inLen - gramSize;
             int end = start + gramSize;
-            termAtt.SetEmpty().Append(inStr, start, end);
+            termAtt.SetEmpty().Append(inStr, start, end - start); // LUCENENET: Corrected 3rd parameter
             offsetAtt.SetOffset(CorrectOffset(start), CorrectOffset(end));
             gramSize++;
             return true;
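
    The corrected third argument reflects a Java-to-.NET API difference: Java's
    append(csq, start, end) takes an exclusive end index, while the .NET
    Append(value, startIndex, count) overloads take a length. The same correction appears again in
    Lucene43NGramTokenizer and PatternTokenizer below. A small sketch using StringBuilder as a
    stand-in for the term attribute:

    using System;
    using System.Text;

    internal static class AppendSketch
    {
        public static void Main()
        {
            string inStr = "abcdef";
            int start = 1, end = 4; // the gram we want is "bcd"

            var sb = new StringBuilder();
            sb.Append(inStr, start, end - start); // .NET count form: appends "bcd"
            Console.WriteLine(sb);

            // Passing the Java-style end index (4) as the third argument would
            // instead append 4 characters from index 1 ("bcde"), or throw
            // ArgumentOutOfRangeException near the end of the string; that is
            // the bug the "Corrected 3rd parameter" comments above fix.
        }
    }
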
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43NGramTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43NGramTokenizer.cs
index b806345..d5ada4f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43NGramTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/Lucene43NGramTokenizer.cs
@@ -150,7 +150,7 @@ namespace Lucene.Net.Analysis.NGram
 
             int oldPos = pos;
             pos++;
-            termAtt.SetEmpty().Append(inStr, oldPos, oldPos + gramSize);
+            termAtt.SetEmpty().Append(inStr, oldPos, gramSize); // LUCENENET: Corrected 3rd parameter
             offsetAtt.SetOffset(CorrectOffset(oldPos), CorrectOffset(oldPos + gramSize));
             return true;
         }
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs
index 83ad83a..c022778 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Diagnostics;
@@ -9,21 +9,21 @@ using System.IO;
 namespace Lucene.Net.Analysis.NGram
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Tokenizes the input into n-grams of the given size(s).
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs
index 2ee4727..9ecf174 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs
@@ -1,5 +1,4 @@
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Text.RegularExpressions;
@@ -7,21 +6,21 @@ using System.Text.RegularExpressions;
 namespace Lucene.Net.Analysis.Pattern
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Factory for <see cref="PatternReplaceFilter"/>. 
@@ -47,7 +46,7 @@ namespace Lucene.Net.Analysis.Pattern
         {
             pattern = GetPattern(args, "pattern");
             replacement = Get(args, "replacement");
-            replaceAll = "all".Equals(Get(args, "replace", Arrays.AsList("all", "first"), "all"), StringComparison.Ordinal);
+            replaceAll = "all".Equals(Get(args, "replace", new string[] { "all", "first" }, "all"), StringComparison.Ordinal);
             if (args.Count > 0)
             {
                 throw new System.ArgumentException("Unknown parameters: " + args);
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizer.cs
index 8717692..644c292 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizer.cs
@@ -122,7 +122,7 @@ namespace Lucene.Net.Analysis.Pattern
                             continue;
                         }
 
-                        termAtt.SetEmpty().Append(str.ToString(), index, endIndex);
+                        termAtt.SetEmpty().Append(str.ToString(), index, endIndex - index); // LUCENENET: Corrected 3rd parameter
                         offsetAtt.SetOffset(CorrectOffset(index), CorrectOffset(endIndex));
                         return true;
 
@@ -145,7 +145,7 @@ namespace Lucene.Net.Analysis.Pattern
                         if (matcher.Index - index > 0)
                         {
                             // found a non-zero-length token
-                            termAtt.SetEmpty().Append(str.ToString(), index, matcher.Index);
+                            termAtt.SetEmpty().Append(str.ToString(), index, matcher.Index - index); // LUCENENET: Corrected 3rd parameter
                             offsetAtt.SetOffset(CorrectOffset(index), CorrectOffset(matcher.Index));
                             index = matcher.Index + matcher.Length;
                             return true;
@@ -162,7 +162,7 @@ namespace Lucene.Net.Analysis.Pattern
                     return false;
                 }
 
-                termAtt.SetEmpty().Append(str.ToString(), index, str.Length);
+                termAtt.SetEmpty().Append(str.ToString(), index, str.Length - index); // LUCENENET: Corrected 3rd parameter
                 offsetAtt.SetOffset(CorrectOffset(index), CorrectOffset(str.Length));
                 index = int.MaxValue; // mark exhausted
                 return true;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
index 885f76a..a3d0043 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
@@ -138,7 +138,7 @@ namespace Lucene.Net.Analysis.Pt
 #pragma warning disable 612, 618
                     LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
-                    Arrays.AsList(exceptions), false);
+                    exceptions, false);
             }
 
             public override bool Matches(char[] s, int len)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs
index fa6f338..7f78640 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs
@@ -4,6 +4,7 @@ using Lucene.Net.Index;
 using Lucene.Net.Util;
 using System.Collections.Generic;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Query
 {
@@ -37,7 +38,7 @@ namespace Lucene.Net.Analysis.Query
     public sealed class QueryAutoStopWordAnalyzer : AnalyzerWrapper
     {
         private readonly Analyzer @delegate;
-        private readonly IDictionary<string, HashSet<string>> stopWordsPerField = new Dictionary<string, HashSet<string>>();
+        private readonly IDictionary<string, ISet<string>> stopWordsPerField = new Dictionary<string, ISet<string>>();
         //The default maximum percentage (40%) of index documents which
         //can contain a term, after which the term is considered to be a stop word.
         public const float defaultMaxDocFreqPercent = 0.4f;
@@ -124,7 +125,7 @@ namespace Lucene.Net.Analysis.Query
 
             foreach (string field in fields)
             {
-                var stopWords = new HashSet<string>();
+                var stopWords = new JCG.HashSet<string>();
                 Terms terms = MultiFields.GetTerms(indexReader, field);
                 CharsRef spare = new CharsRef();
                 if (terms != null)
@@ -151,7 +152,7 @@ namespace Lucene.Net.Analysis.Query
 
         protected override TokenStreamComponents WrapComponents(string fieldName, TokenStreamComponents components)
         {
-            if (!stopWordsPerField.TryGetValue(fieldName, out HashSet<string> stopWords) || stopWords == null)
+            if (!stopWordsPerField.TryGetValue(fieldName, out ISet<string> stopWords) || stopWords == null)
             {
                 return components;
             }
@@ -180,7 +181,7 @@ namespace Lucene.Net.Analysis.Query
             IList<Term> allStopWords = new List<Term>();
             foreach (string fieldName in stopWordsPerField.Keys)
             {
-                HashSet<string> stopWords = stopWordsPerField[fieldName];
+                ISet<string> stopWords = stopWordsPerField[fieldName];
                 foreach (string text in stopWords)
                 {
                     allStopWords.Add(new Term(fieldName, text));
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
index 2ee47f5..8af58b2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
@@ -3,7 +3,6 @@ using Lucene.Net.Analysis.Miscellaneous;
 using Lucene.Net.Analysis.Snowball;
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.IO;
@@ -12,21 +11,21 @@ using System.Text;
 namespace Lucene.Net.Analysis.Ru
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// <see cref="Analyzer"/> for Russian language. 
@@ -70,7 +69,7 @@ namespace Lucene.Net.Analysis.Ru
         {
             /// @deprecated (3.1) remove this for Lucene 5.0 
             [Obsolete("(3.1) remove this for Lucene 5.0")]
-            internal static readonly CharArraySet DEFAULT_STOP_SET_30 = CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList(RUSSIAN_STOP_WORDS_30), false));
+            internal static readonly CharArraySet DEFAULT_STOP_SET_30 = CharArraySet.UnmodifiableSet(new CharArraySet(LuceneVersion.LUCENE_CURRENT, RUSSIAN_STOP_WORDS_30, false));
             internal static readonly CharArraySet DEFAULT_STOP_SET = LoadDefaultStopSet();
 
             private static CharArraySet LoadDefaultStopSet() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizer.cs
index 38b4180..8a7932e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizer.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using System;
 using System.IO;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs
index d9a9b56..d858052 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using J2N;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Store;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
@@ -9,21 +9,21 @@ using System.Diagnostics;
 namespace Lucene.Net.Analysis.Synonym
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Matches single or multi word synonyms in a token stream.
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
index 84e4c33..190a8e6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
@@ -1,6 +1,5 @@
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Store;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
@@ -8,6 +7,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -77,7 +77,7 @@ namespace Lucene.Net.Analysis.Synonym
         /// </summary>
         public class Builder
         {
-            internal readonly HashMap<CharsRef, MapEntry> workingSet = new HashMap<CharsRef, MapEntry>();
+            internal readonly IDictionary<CharsRef, MapEntry> workingSet = new JCG.Dictionary<CharsRef, MapEntry>();
             internal readonly BytesRefHash words = new BytesRefHash();
             internal readonly BytesRef utf8Scratch = new BytesRef(8);
             internal int maxHorizontalContext;
@@ -251,11 +251,11 @@ namespace Lucene.Net.Analysis.Synonym
                 BytesRef scratch = new BytesRef(64);
                 ByteArrayDataOutput scratchOutput = new ByteArrayDataOutput();
 
-                HashSet<int?> dedupSet;
+                ISet<int?> dedupSet;
 
                 if (dedup)
                 {
-                    dedupSet = new HashSet<int?>();
+                    dedupSet = new JCG.HashSet<int?>();
                 }
                 else
                 {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizer.cs
index 508db6f..48fc965 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizer.cs
@@ -1,8 +1,8 @@
 #if FEATURE_BREAKITERATOR
+using J2N;
 using ICU4N.Text;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilter.cs
index 0693fa9..40c151b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilter.cs
@@ -12,21 +12,21 @@ using System.Text.RegularExpressions;
 namespace Lucene.Net.Analysis.Th
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     // LUCENENET NOTE: Removing this notice from the doc comment because it is not relevant for our purposes.
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilter.cs
index fcab2df..0cd0464 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilter.cs
@@ -1,27 +1,27 @@
-using J2N.Globalization;
+using J2N;
+using J2N.Globalization;
 using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Support;
 using System;
 using System.Globalization;
 
 namespace Lucene.Net.Analysis.Tr
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Normalizes Turkish token text to lower case.
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
index 4241335..571b67e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis.Core;
+using J2N.Collections.Generic.Extensions;
+using Lucene.Net.Analysis.Core;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
@@ -8,6 +9,7 @@ using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Util
 {
@@ -59,7 +61,7 @@ namespace Lucene.Net.Analysis.Util
         protected AbstractAnalysisFactory(IDictionary<string, string> args)
         {
             IsExplicitLuceneMatchVersion = false;
-            originalArgs = Collections.UnmodifiableMap(args);
+            originalArgs = args.AsReadOnly();
             string version = Get(args, LUCENE_MATCH_VERSION_PARAM);
             // LUCENENET TODO: What should we do if the version is null?
             //luceneMatchVersion = version == null ? (LuceneVersion?)null : LuceneVersionHelpers.ParseLeniently(version);
@@ -286,12 +288,14 @@ namespace Lucene.Net.Analysis.Util
             if (args.TryGetValue(name, out s))
             {
                 args.Remove(name);
-                HashSet<string> set = null;
+                ISet<string> set = null;
                 Match matcher = ITEM_PATTERN.Match(s);
                 if (matcher.Success)
                 {
-                    set = new HashSet<string>();
-                    set.Add(matcher.Groups[0].Value);
+                    set = new JCG.HashSet<string>
+                    {
+                        matcher.Groups[0].Value
+                    };
                     matcher = matcher.NextMatch();
                     while (matcher.Success)
                     {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs
index a2af4bd..620c187 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs
@@ -1,7 +1,9 @@
-using Lucene.Net.Support;
+using J2N.Collections.Generic.Extensions;
+using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Util
 {
@@ -59,7 +61,7 @@ namespace Lucene.Net.Analysis.Util
         {
             lock (this)
             {
-                IDictionary<string, Type> services = new LinkedHashMap<string, Type>(this.services);
+                IDictionary<string, Type> services = new JCG.LinkedDictionary<string, Type>(this.services);
                 SPIClassIterator<S> loader = SPIClassIterator<S>.Get();
 
                 foreach (var service in loader)
@@ -93,7 +95,7 @@ namespace Lucene.Net.Analysis.Util
                         services.Add(name, service);
                     }
                 }
-                this.services = Collections.UnmodifiableMap(services);
+                this.services = services.AsReadOnly();
             }
         }
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
index 88fd5b5..bdc9c3d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
@@ -1,5 +1,6 @@
-using J2N.Globalization;
-using Lucene.Net.Support;
+using J2N;
+using J2N.Text;
+using J2N.Globalization;
 using Lucene.Net.Util;
 using System;
 using System.Collections;
@@ -13,21 +14,21 @@ using System.Text;
 namespace Lucene.Net.Analysis.Util
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A simple class that stores key <see cref="string"/>s as <see cref="T:char[]"/>'s in a
@@ -191,8 +192,8 @@ namespace Lucene.Net.Analysis.Util
         public virtual void Clear()
         {
             count = 0;
-            Arrays.Fill(keys, null);
-            Arrays.Fill(values, null);
+            keys.Fill(null);
+            values.Fill(null);
         }
 
         /// <summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs
index 21b944d..28f4c0c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs
@@ -1,5 +1,5 @@
 using J2N.Globalization;
-using Lucene.Net.Support;
+using J2N.Text;
 using Lucene.Net.Util;
 using System;
 using System.Collections;
@@ -8,25 +8,26 @@ using System.ComponentModel;
 using System.Globalization;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Util
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A simple class that stores <see cref="string"/>s as <see cref="T:char[]"/>'s in a
@@ -340,7 +341,9 @@ namespace Lucene.Net.Analysis.Util
         /// <returns><c>true</c> if the specified object is equal to this set</returns>
         public override bool Equals(object obj)
         {
-            return Collections.Equals(this, obj as ISet<string>);
+            if (obj is ISet<string> other)
+                return JCG.SetEqualityComparer<string>.Default.Equals(this, other);
+            return false;
         }
 
         /// <summary>
@@ -355,7 +358,7 @@ namespace Lucene.Net.Analysis.Util
         /// <returns>the hash code value for this set</returns>
         public override int GetHashCode()
         {
-            return Collections.GetHashCode(this);
+            return JCG.SetEqualityComparer<string>.Default.GetHashCode(this);
         }
 
         /// <summary>
@@ -393,8 +396,7 @@ namespace Lucene.Net.Analysis.Util
         /// <returns><c>true</c> if the current set is equal to other; otherwise, <c>false</c>.</returns>
         public virtual bool SetEquals(IEnumerable<string> other)
         {
-            var otherSet = other as CharArraySet;
-            if (otherSet == null)
+            if (!(other is CharArraySet otherSet))
                 return false;
 
             // Invoke the implementation on CharArrayMap that
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs
index e3ba728..cfb9ec6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs
@@ -1,7 +1,7 @@
-using System.Diagnostics;
+using J2N;
+using System.Diagnostics;
 using System.IO;
 using Lucene.Net.Analysis.TokenAttributes;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis.Util
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
index ffda963..86df58c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
@@ -1,4 +1,6 @@
-using Lucene.Net.Support;
+using J2N;
+using J2N.Text;
+using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Diagnostics;
@@ -68,7 +70,7 @@ namespace Lucene.Net.Analysis.Util
         }
 
         /// <summary>
-        /// Returns the code point at the given index of the <see cref="ICharSequence"/>.
+        /// Returns the code point at the given index of the <see cref="string"/>.
         /// Depending on the <see cref="LuceneVersion"/> passed to
         /// <see cref="CharacterUtils.GetInstance(LuceneVersion)"/> this method mimics the behavior
         /// of <c>Character.CodePointAt(char[], int)</c> as it would have been
@@ -82,11 +84,29 @@ namespace Lucene.Net.Analysis.Util
         /// <returns> the Unicode code point at the given index </returns>
         /// <exception cref="NullReferenceException">
         ///           - if the sequence is null. </exception>
-        /// <exception cref="IndexOutOfRangeException">
+        /// <exception cref="ArgumentOutOfRangeException">
         ///           - if the value offset is negative or not less than the length of
         ///           the character sequence. </exception>
         public abstract int CodePointAt(string seq, int offset);
 
+        /// <summary>
+        /// Returns the code point at the given index of the <see cref="ICharSequence"/>.
+        /// Depending on the <see cref="LuceneVersion"/> passed to
+        /// <see cref="CharacterUtils.GetInstance(LuceneVersion)"/> this method mimics the behavior
+        /// of <c>Character.CodePointAt(char[], int)</c> as it would have been
+        /// available on a Java 1.4 JVM or on a later virtual machine version.
+        /// </summary>
+        /// <param name="seq">
+        ///          a character sequence </param>
+        /// <param name="offset">
+        ///          the offset to the char values in the chars array to be converted
+        /// </param>
+        /// <returns> the Unicode code point at the given index </returns>
+        /// <exception cref="NullReferenceException">
+        ///           - if the sequence is null. </exception>
+        /// <exception cref="ArgumentOutOfRangeException">
+        ///           - if the value offset is negative or not less than the length of
+        ///           the character sequence. </exception>
         public abstract int CodePointAt(ICharSequence seq, int offset);
 
         /// <summary>
@@ -107,7 +127,7 @@ namespace Lucene.Net.Analysis.Util
         /// <returns> the Unicode code point at the given index </returns>
         /// <exception cref="NullReferenceException">
         ///           - if the array is null. </exception>
-        /// <exception cref="IndexOutOfRangeException">
+        /// <exception cref="ArgumentOutOfRangeException">
         ///           - if the value offset is negative or not less than the length of
         ///           the char array. </exception>
         public abstract int CodePointAt(char[] chars, int offset, int limit);
@@ -127,7 +147,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (bufferSize < 2)
             {
-                throw new System.ArgumentException("buffersize must be >= 2");
+                throw new ArgumentException("buffersize must be >= 2");
             }
             return new CharacterBuffer(new char[bufferSize], 0, 0);
         }
@@ -138,17 +158,17 @@ namespace Lucene.Net.Analysis.Util
         /// at the given offset. </summary>
         /// <param name="buffer"> the char buffer to lowercase </param>
         /// <param name="offset"> the offset to start at </param>
-        /// <param name="limit"> the max char in the buffer to lower case </param>
-        public virtual void ToLower(char[] buffer, int offset, int limit) // LUCENENET specific - marked virtual so we can override the default
+        /// <param name="length"> the number of characters in the buffer to lower case </param>
+        public virtual void ToLower(char[] buffer, int offset, int length) // LUCENENET specific - marked virtual so we can override the default
         {
-            Debug.Assert(buffer.Length >= limit);
+            Debug.Assert(buffer.Length >= length);
             Debug.Assert(offset <= 0 && offset <= buffer.Length);
 
             // Optimization provided by Vincent Van Den Berghe: 
             // http://search-lucene.com/m/Lucene.Net/j1zMf1uckOzOYqsi?subj=Proposal+to+speed+up+implementation+of+LowercaseFilter+charUtils+ToLower
-            new string(buffer, offset, limit)
+            new string(buffer, offset, length)
                 .ToLowerInvariant()
-                .CopyTo(0, buffer, offset, limit);
+                .CopyTo(0, buffer, offset, length);
 
             // Original (slow) Lucene implementation:
             //for (int i = offset; i < limit; )
@@ -164,17 +184,17 @@ namespace Lucene.Net.Analysis.Util
         /// at the given offset. </summary>
         /// <param name="buffer"> the char buffer to UPPERCASE </param>
         /// <param name="offset"> the offset to start at </param>
-        /// <param name="limit"> the max char in the buffer to lower case </param>
-        public virtual void ToUpper(char[] buffer, int offset, int limit) // LUCENENET specific - marked virtual so we can override the default
+        /// <param name="length"> the number of characters in the buffer to lower case </param>
+        public virtual void ToUpper(char[] buffer, int offset, int length) // LUCENENET specific - marked virtual so we can override the default
         {
-            Debug.Assert(buffer.Length >= limit);
+            Debug.Assert(buffer.Length >= length);
             Debug.Assert(offset <= 0 && offset <= buffer.Length);
 
             // Optimization provided by Vincent Van Den Berghe: 
             // http://search-lucene.com/m/Lucene.Net/j1zMf1uckOzOYqsi?subj=Proposal+to+speed+up+implementation+of+LowercaseFilter+charUtils+ToLower
-            new string(buffer, offset, limit)
+            new string(buffer, offset, length)
                 .ToUpperInvariant()
-                .CopyTo(0, buffer, offset, limit);
+                .CopyTo(0, buffer, offset, length);
 
             // Original (slow) Lucene implementation:
             //for (int i = offset; i < limit; )
@@ -192,7 +212,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (srcLen < 0)
             {
-                throw new System.ArgumentException("srcLen must be >= 0");
+                throw new ArgumentException("srcLen must be >= 0");
             }
             int codePointCount = 0;
             for (int i = 0; i < srcLen; )
@@ -212,7 +232,7 @@ namespace Lucene.Net.Analysis.Util
         {
             if (srcLen < 0)
             {
-                throw new System.ArgumentException("srcLen must be >= 0");
+                throw new ArgumentException("srcLen must be >= 0");
             }
             int written = 0;
             for (int i = 0; i < srcLen; ++i)
@@ -299,7 +319,7 @@ namespace Lucene.Net.Analysis.Util
 
             public override int CodePointAt(char[] chars, int offset, int limit)
             {
-                return Character.CodePointAt(chars, offset, limit); // LUCENENET TODO: This will throw a NullReferenceException if chars is null. Should this be an ArgumentNullException in .NET?
+                return Character.CodePointAt(chars, offset, limit);
             }
 
             public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars)
@@ -307,7 +327,7 @@ namespace Lucene.Net.Analysis.Util
                 Debug.Assert(buffer.Buffer.Length >= 2);
                 if (numChars < 2 || numChars > buffer.Buffer.Length)
                 {
-                    throw new System.ArgumentException("numChars must be >= 2 and <= the buffer size");
+                    throw new ArgumentException("numChars must be >= 2 and <= the buffer size");
                 }
                 char[] charBuffer = buffer.Buffer;
                 buffer.offset = 0;
@@ -361,21 +381,39 @@ namespace Lucene.Net.Analysis.Util
         {
             public override int CodePointAt(string seq, int offset)
             {
+                // LUCENENET specific - added guard clauses
+                if (seq == null)
+                    throw new ArgumentNullException(nameof(seq));
+                if (offset < 0 || offset >= seq.Length)
+                    throw new ArgumentOutOfRangeException(nameof(offset));
+
                 return seq[offset];
             }
 
             public override int CodePointAt(ICharSequence seq, int offset)
             {
+                // LUCENENET specific - added guard clauses
+                if (seq == null)
+                    throw new ArgumentNullException(nameof(seq));
+                if (offset < 0 || offset >= seq.Length)
+                    throw new ArgumentOutOfRangeException(nameof(offset));
+
                 return seq[offset];
             }
 
             public override int CodePointAt(char[] chars, int offset, int limit)
             {
+                if (chars == null)
+                    throw new ArgumentNullException(nameof(chars)); // LUCENENET specific - added for .NET compatibility
                 if (offset >= limit)
                 {
-                    throw new System.IndexOutOfRangeException("offset must be less than limit");
+                    throw new ArgumentOutOfRangeException("offset must be less than limit");
                 }
-                return chars[offset]; // LUCENENET TODO: This will throw a NullReferenceException if chars is null. Should this be an ArgumentNullException in .NET?
+                // LUCENENET specific - added array bound check
+                if (offset < 0 || offset >= chars.Length)
+                    throw new ArgumentOutOfRangeException(nameof(offset));
+
+                return chars[offset];
             }
 
             public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars)
@@ -383,7 +421,7 @@ namespace Lucene.Net.Analysis.Util
                 Debug.Assert(buffer.Buffer.Length >= 1);
                 if (numChars < 1 || numChars > buffer.Buffer.Length)
                 {
-                    throw new System.ArgumentException("numChars must be >= 1 and <= the buffer size");
+                    throw new ArgumentException("numChars must be >= 1 and <= the buffer size");
                 }
                 buffer.offset = 0;
                 int read = ReadFully(reader, buffer.Buffer, 0, numChars);
@@ -402,7 +440,7 @@ namespace Lucene.Net.Analysis.Util
                 int result = index + offset;
                 if (result < 0 || result > count)
                 {
-                    throw new System.IndexOutOfRangeException();
+                    throw new ArgumentOutOfRangeException();
                 }
                 return result;
             }
@@ -421,8 +459,8 @@ namespace Lucene.Net.Analysis.Util
 
                 for (int i = offset; i < limit;)
                 {
-                    i += Character.ToChars(
-                        Character.ToLower(
+                    i += J2N.Character.ToChars(
+                        J2N.Character.ToLower(
                             CodePointAt(buffer, i, limit)), buffer, i);
                 }
             }
@@ -467,38 +505,20 @@ namespace Lucene.Net.Analysis.Util
             /// <returns> the buffer </returns>
             [WritableArray]
             [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
-            public char[] Buffer
-            {
-                get
-                {
-                    return buffer;
-                }
-            }
+            public char[] Buffer => buffer;
 
             /// <summary>
             /// Returns the data offset in the internal buffer.
             /// </summary>
             /// <returns> the offset </returns>
-            public int Offset
-            {
-                get
-                {
-                    return offset;
-                }
-            }
+            public int Offset => offset;
 
             /// <summary>
             /// Return the length of the data in the internal buffer starting at
             /// <see cref="Offset"/>
             /// </summary>
             /// <returns> the length </returns>
-            public int Length
-            {
-                get
-                {
-                    return length;
-                }
-            }
+            public int Length => length;
 
             /// <summary>
             /// Resets the CharacterBuffer. All internals are reset to its default
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/OpenStringBuilder.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/OpenStringBuilder.cs
index d2dbab0..164b229 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/OpenStringBuilder.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/OpenStringBuilder.cs
@@ -1,26 +1,27 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Diagnostics.CodeAnalysis;
 using System.Text;
+using WritableArrayAttribute = Lucene.Net.Support.WritableArrayAttribute;
 
 namespace Lucene.Net.Analysis.Util
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A StringBuilder that allows one to access the array.
@@ -35,6 +36,8 @@ namespace Lucene.Net.Analysis.Util
         {
         }
 
+        bool ICharSequence.HasValue => m_buf != null;
+
         public OpenStringBuilder(int size)
         {
             m_buf = new char[size];
@@ -47,11 +50,8 @@ namespace Lucene.Net.Analysis.Util
 
         public virtual int Length
         {
-            set
-            {
-                this.m_len = value;
-            }
-            get { return m_len; }
+            get => m_len;
+            set => m_len = value;
         }
 
         public virtual void Set(char[] arr, int end)
@@ -62,13 +62,7 @@ namespace Lucene.Net.Analysis.Util
 
         [WritableArray]
         [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
-        public virtual char[] Array
-        {
-            get
-            {
-                return m_buf;
-            }
-        }
+        public virtual char[] Array => m_buf;
 
         // LUCENENE NOTE: This is essentially a duplicate of Length (except that property can be set).
         // .NET uses Length for StringBuilder anyway, so that property is preferable to this one.
@@ -77,17 +71,14 @@ namespace Lucene.Net.Analysis.Util
         //    get{ return m_len; }
         //}
 
-        public virtual int Capacity
-        {
-            get { return m_buf.Length; }
-        }
+        public virtual int Capacity => m_buf.Length;
 
         public virtual OpenStringBuilder Append(ICharSequence csq) 
         {
             return Append(csq, 0, csq.Length);
         }
 
-        public virtual OpenStringBuilder Append(ICharSequence csq, int start, int end)
+        public virtual OpenStringBuilder Append(ICharSequence csq, int start, int end) // LUCENENET TODO: API - change to startIndex/length to match .NET
         {
             EnsureCapacity(end - start);
             for (int i = start; i < end; i++)
@@ -104,7 +95,7 @@ namespace Lucene.Net.Analysis.Util
         }
 
         // LUCENENET specific - overload for string (more common in .NET than ICharSequence)
-        public virtual OpenStringBuilder Append(string csq, int start, int end)
+        public virtual OpenStringBuilder Append(string csq, int start, int end) // LUCENENET TODO: API - change to startIndex/length to match .NET
         {
             EnsureCapacity(end - start);
             for (int i = start; i < end; i++)
@@ -121,7 +112,7 @@ namespace Lucene.Net.Analysis.Util
         }
 
         // LUCENENET specific - overload for StringBuilder
-        public virtual OpenStringBuilder Append(StringBuilder csq, int start, int end)
+        public virtual OpenStringBuilder Append(StringBuilder csq, int start, int end) // LUCENENET TODO: API - change to startIndex/length to match .NET
         {
             EnsureCapacity(end - start);
             for (int i = start; i < end; i++)
@@ -152,13 +143,29 @@ namespace Lucene.Net.Analysis.Util
         // LUCENENET specific - added to .NETify
         public virtual char this[int index]
         {
-            get { return m_buf[index]; }
-            set { m_buf[index] = value; }
+            get => m_buf[index];
+            set => m_buf[index] = value;
         }
 
-        public virtual ICharSequence SubSequence(int start, int end)
+        public virtual ICharSequence Subsequence(int startIndex, int length)
         {
-            throw new System.NotSupportedException(); // todo
+            // From Apache Harmony String class
+            if (m_buf == null || (startIndex == 0 && length == m_buf.Length))
+            {
+                return new CharArrayCharSequence(m_buf);
+            }
+            if (startIndex < 0)
+                throw new ArgumentOutOfRangeException(nameof(startIndex));
+            if (length < 0)
+                throw new ArgumentOutOfRangeException(nameof(length));
+            if (startIndex + length > m_buf.Length)
+                throw new ArgumentOutOfRangeException("", $"{nameof(startIndex)} + {nameof(length)} > {nameof(Length)}");
+
+            char[] result = new char[length];
+            for (int i = 0, j = startIndex; i < length; i++, j++)
+                result[i] = m_buf[j];
+
+            return new CharArrayCharSequence(result);
         }
 
         public virtual void UnsafeWrite(char b)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
index 5281f5f..f49a375 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
@@ -1,9 +1,8 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using System.Text;
 using System.Text.RegularExpressions;
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs
index d9bc313..45ed084 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs
@@ -7,21 +7,21 @@ using System.IO;
 namespace Lucene.Net.Analysis.Wikipedia
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Factory for <see cref="WikipediaTokenizer"/>.
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
index a65a433..e04dc5f 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
@@ -1,13 +1,14 @@
 // Lucene version compatibility level < 7.1.0
+using J2N;
 using ICU4N.Text;
 using Lucene.Net.Analysis.CharFilters;
 using Lucene.Net.Analysis.Util;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Diagnostics;
 using System.IO;
 using System.Text;
+using ExceptionToClassNameConventionAttribute = Lucene.Net.Support.ExceptionToClassNameConventionAttribute;
 
 namespace Lucene.Net.Analysis.Icu
 {
@@ -135,7 +136,7 @@ namespace Lucene.Net.Analysis.Icu
                     break;
                 }
 
-                int lastCodePoint = Character.CodePointBefore(tmpBuffer.Buffer, tmpBuffer.Length /*, 0*/);
+                int lastCodePoint = Character.CodePointBefore(tmpBuffer.Buffer, tmpBuffer.Length, 0);
                 if (normalizer.IsInert(lastCodePoint))
                 {
                     // we require an inert char so that we can normalize content before and
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
index d5853c0..2017217 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Ja.Util;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Analysis.Ja.Util;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
@@ -7,6 +7,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja.Dict
 {
@@ -144,7 +145,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
         public int[][] Lookup(char[] chars, int off, int len)
         {
             // TODO: can we avoid this treemap/toIndexArray?
-            TreeDictionary<int, int[]> result = new TreeDictionary<int, int[]>(); // index, [length, length...]
+            IDictionary<int, int[]> result = new JCG.SortedDictionary<int, int[]>(); // index, [length, length...]
             bool found = false; // true if we found any results
 
             FST.BytesReader fstReader = fst.GetBytesReader();
@@ -188,7 +189,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
         /// </summary>
         /// <param name="input"></param>
         /// <returns>Array of {wordId, index, length}.</returns>
-        private int[][] ToIndexArray(TreeDictionary<int, int[]> input)
+        private int[][] ToIndexArray(IDictionary<int, int[]> input)
         {
             List<int[]> result = new List<int[]>();
             foreach (int i in input.Keys)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseAnalyzer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseAnalyzer.cs
index 23822d6..46e2539 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseAnalyzer.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja
 {
@@ -86,7 +87,7 @@ namespace Lucene.Net.Analysis.Ja
                 try
                 {
                     CharArraySet tagset = LoadStopwordSet(false, typeof(JapaneseAnalyzer), "stoptags.txt", "#");
-                    var DEFAULT_STOP_TAGS = new HashSet<string>();
+                    var DEFAULT_STOP_TAGS = new JCG.HashSet<string>();
                     foreach (string element in tagset)
                     {
                         DEFAULT_STOP_TAGS.Add(element);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs
index 04fc900..f52c550 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs
@@ -1,6 +1,7 @@
 using Lucene.Net.Analysis.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja
 {
@@ -58,7 +59,7 @@ namespace Lucene.Net.Analysis.Ja
             CharArraySet cas = GetWordSet(loader, stopTagFiles, false);
             if (cas != null)
             {
-                stopTags = new HashSet<string>();
+                stopTags = new JCG.HashSet<string>();
                 foreach (string element in cas) 
                 {
                     stopTags.Add(element);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
index 7512eb2..b035a3f 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Analysis.Util;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
+using J2N;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
index 49aa559..1d7dbb0 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System.Diagnostics;
 using System.Globalization;
 using System.IO;
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
index 6c07b40..6127ca0 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using Lucene.Net.Util.Packed;
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
index e57570a..5529769 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis.Ja.Dict;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Analysis.Ja.Dict;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs
index 5562c8d..238c88d 100644
--- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs
+++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs
@@ -1,4 +1,5 @@
 // Lucene version compatibility level 8.2.0
+using J2N;
 using Lucene.Net.Analysis.Morfologik.TokenAttributes;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Support;
diff --git a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
index b343994..dc9cef7 100644
--- a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
@@ -89,8 +89,8 @@ namespace Lucene.Net.Analysis.Phonetic
                 RestoreState(state);
 
                 int start = matcher.Index;
-                int end = start + matcher.Length;
-                termAtt.SetEmpty().Append(encoded, start, end);
+                //int end = start + matcher.Length;
+                termAtt.SetEmpty().Append(encoded, start, matcher.Length); // LUCENENET: Corrected 3rd parameter
                 posIncAtt.PositionIncrement = 0;
                 return true;
             }
@@ -106,8 +106,8 @@ namespace Lucene.Net.Analysis.Phonetic
                 if (matcher.Success)
                 {
                     int start = matcher.Index;
-                    int end = start + matcher.Length;
-                    termAtt.SetEmpty().Append(encoded, start, end);
+                    //int end = start + matcher.Length;
+                    termAtt.SetEmpty().Append(encoded, start, matcher.Length); // LUCENENET: Corrected 3rd parameter
                 }
                 return true;
             }
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
index e1cec25..95cab22 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
@@ -1,11 +1,13 @@
 // commons-codec version compatibility level: 1.9
+using J2N.Collections.Generic.Extensions;
+using J2N.Text;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
-using System.Globalization;
 using System.IO;
 using System.Reflection;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 {
@@ -205,7 +207,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                             string[] langs = TOKEN.Split(parts[1]).TrimEnd();
                             bool accept = parts[2].Equals("true", StringComparison.Ordinal);
 
-                            rules.Add(new LangRule(pattern, new HashSet<string>(langs), accept));
+                            rules.Add(new LangRule(pattern, new JCG.HashSet<string>(langs), accept));
                         }
                     }
                 }
@@ -218,7 +220,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
         private Lang(IList<LangRule> rules, Languages languages)
         {
-            this.rules = Collections.UnmodifiableList(rules);
+            this.rules = rules.AsReadOnly();
             this.languages = languages;
         }
 
@@ -242,7 +244,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         {
             string text = input.ToLowerInvariant();
 
-            ISet<string> langs = new HashSet<string>(this.languages.GetLanguages());
+            ISet<string> langs = new JCG.HashSet<string>(this.languages.GetLanguages());
             foreach (LangRule rule in this.rules)
             {
                 if (rule.Matches(text))
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs
index 201a245..d01f343 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs
@@ -1,10 +1,12 @@
 // commons-codec version compatibility level: 1.9
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Reflection;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 {
@@ -89,7 +91,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         public static Languages GetInstance(string languagesResourceName)
         {
             // read languages list
-            ISet<string> ls = new HashSet<string>();
+            ISet<string> ls = new JCG.HashSet<string>();
             Stream langIS = typeof(Languages).GetTypeInfo().Assembly.FindAndGetManifestResourceStream(typeof(Languages), languagesResourceName);
 
             if (langIS == null)
@@ -125,7 +127,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                 }
             }
 
-            return new Languages(Collections.UnmodifiableSet(ls));
+            return new Languages(ls.AsReadOnly());
         }
 
         private static string LangResourceName(NameType nameType)
@@ -265,7 +267,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
         internal SomeLanguages(ISet<string> languages)
         {
-            this.languages = Collections.UnmodifiableSet(languages);
+            this.languages = languages.AsReadOnly();
         }
 
         public override bool Contains(string language)
@@ -306,7 +308,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
             else
             {
                 SomeLanguages sl = (SomeLanguages)other;
-                ISet<string> ls = new HashSet<string>(/*Math.Min(languages.Count, sl.languages.Count)*/);
+                ISet<string> ls = new JCG.HashSet<string>(Math.Min(languages.Count, sl.languages.Count));
                 foreach (string lang in languages)
                 {
                     if (sl.languages.Contains(lang))
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
index 35305b7..8223c86 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
@@ -1,11 +1,14 @@
 // commons-codec version compatibility level: 1.9
+using J2N.Collections.Generic.Extensions;
+using J2N.Text;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
-using System.Globalization;
 using System.Linq;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
+
 
 namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 {
@@ -284,17 +287,11 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         private static IDictionary<NameType, ISet<string>> LoadNamePrefixes() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
         {
             var namePrefixes = new Dictionary<NameType, ISet<string>>();
-            namePrefixes[NameType.ASHKENAZI] =
-                    Collections.UnmodifiableSet(
-                            new HashSet<string>() { "bar", "ben", "da", "de", "van", "von" });
-            namePrefixes[NameType.SEPHARDIC] =
-                    Collections.UnmodifiableSet(
-                            new HashSet<string>() { "al", "el", "da", "dal", "de", "del", "dela", "de la",
-                                                              "della", "des", "di", "do", "dos", "du", "van", "von" });
-            namePrefixes[NameType.GENERIC] =
-                    Collections.UnmodifiableSet(
-                            new HashSet<string>() { "da", "dal", "de", "del", "dela", "de la", "della",
-                                                          "des", "di", "do", "dos", "du", "van", "von" });
+            namePrefixes[NameType.ASHKENAZI] = new JCG.HashSet<string>() { "bar", "ben", "da", "de", "van", "von" }.AsReadOnly();
+            namePrefixes[NameType.SEPHARDIC] = new JCG.HashSet<string>() { "al", "el", "da", "dal", "de", "del", "dela", "de la",
+                                                              "della", "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly();
+            namePrefixes[NameType.GENERIC] = new JCG.HashSet<string>() { "da", "dal", "de", "del", "dela", "de la", "della",
+                                                          "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly();
             return namePrefixes;
         }
 
@@ -387,7 +384,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                 return phonemeBuilder;
             }
 
-            ISet<Phoneme> phonemes = new SortedSet<Phoneme>(Phoneme.COMPARER);
+            ISet<Phoneme> phonemes = new JCG.SortedSet<Phoneme>(Phoneme.COMPARER);
 
             foreach (Phoneme phoneme in phonemeBuilder.Phonemes)
             {
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
index 2d240f0..9506f2a 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
@@ -1,4 +1,6 @@
 // commons-codec version compatibility level: 1.9
+using J2N.Collections.Generic.Extensions;
+using J2N.Text;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
@@ -6,6 +8,7 @@ using System.IO;
 using System.Reflection;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 {
@@ -146,10 +149,10 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                         rs["common"] = ParseRules(CreateScanner(s, rt, "common"), CreateResourceName(s, rt, "common"));
                     }
 
-                    rts[rt] = Collections.UnmodifiableMap(rs);
+                    rts[rt] = rs.AsReadOnly();
                 }
 
-                rules[s] = Collections.UnmodifiableMap(rts);
+                rules[s] = rts.AsReadOnly();
             }
             return rules;
         }
@@ -296,7 +299,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         /// <returns>A list of <see cref="Rule"/>s that apply.</returns>
         public static IList<Rule> GetInstance(NameType nameType, RuleType rt, string lang)
         {
-            return GetInstance(nameType, rt, LanguageSet.From(new HashSet<string>() { lang }));
+            return GetInstance(nameType, rt, LanguageSet.From(new JCG.HashSet<string>() { lang }));
         }
 
         /// <summary>
@@ -356,7 +359,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                 }
                 string before = ph.Substring(0, open - 0);
                 string input = ph.Substring(open + 1, (ph.Length - 1) - (open + 1));
-                ISet<string> langs = new HashSet<string>(PLUS.Split(input).TrimEnd());
+                ISet<string> langs = new JCG.HashSet<string>(PLUS.Split(input).TrimEnd());
 
                 return new Phoneme(before, LanguageSet.From(langs));
             }
@@ -419,7 +422,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
         private static IDictionary<string, IList<Rule>> ParseRules(TextReader reader, string location)
         {
-            IDictionary<string, IList<Rule>> lines = new HashMap<string, IList<Rule>>();
+            IDictionary<string, IList<Rule>> lines = new JCG.Dictionary<string, IList<Rule>>();
             int currentLine = 0;
 
             bool inMultilineComment = false;
@@ -858,15 +861,15 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 
             // evaluate the pattern, left context and right context
             // fail early if any of the evaluations is not successful
-            if (!input.SubSequence(i, ipl).Equals(this.pattern))
+            if (!input.Subsequence(i, ipl - i).Equals(this.pattern)) // LUCENENET: Corrected 2nd Subsequence parameter
             {
                 return false;
             }
-            else if (!this.rContext.IsMatch(input.SubSequence(ipl, input.Length)))
+            else if (!this.rContext.IsMatch(input.Subsequence(ipl, input.Length - ipl))) // LUCENENET: Corrected 2nd Subsequence parameter
             {
                 return false;
             }
-            return this.lContext.IsMatch(input.SubSequence(0, i));
+            return this.lContext.IsMatch(input.Subsequence(0, i - 0)); // LUCENENET: Corrected 2nd Subsequence parameter
         }
 
         /// <summary>
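
    The three Subsequence corrections above all apply the same index conversion: Java's
    subSequence(start, end) takes an exclusive end index, while the replacement here takes
    (startIndex, length), so each call passes end - start. A minimal sketch of that rule using
    plain strings (the types and names below are illustrative, not part of the commit):

        using System;

        static class SubsequenceConversionSketch
        {
            // Java-style subSequence(start, end), end exclusive, expressed over .NET's
            // (start, length) convention -- the same conversion the hunks above perform.
            private static string JavaSubSequence(string s, int start, int end)
                => s.Substring(start, end - start);

            static void Main()
            {
                string input = "abcdef";
                int i = 1, ipl = 4;
                Console.WriteLine(JavaSubSequence(input, i, ipl)); // prints "bcd"
            }
        }
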
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
index 1e51694..883e274 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
@@ -1,4 +1,5 @@
 // commons-codec version compatibility level: 1.10
+using J2N.Text;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
index 3885813..0b2eab1 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
@@ -1,7 +1,6 @@
 // lucene version compatibility level: 4.8.1
 using J2N.IO;
 using Lucene.Net.Support;
-using Lucene.Net.Support.IO;
 using System;
 using System.IO;
 using System.Reflection;
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
index 2c237a7..517cc9d 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
@@ -1,7 +1,6 @@
 // lucene version compatibility level: 4.8.1
 using J2N.IO;
 using Lucene.Net.Support;
-using Lucene.Net.Support.IO;
 using System;
 using System.IO;
 using System.Reflection;
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
index 88b0e13..e20b9a2 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
@@ -1,6 +1,7 @@
 using J2N.IO;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -64,7 +65,7 @@ namespace Egothor.Stemmer
     /// </summary>
     public class Row
     {
-        internal SortedDictionary<char, Cell> cells = new SortedDictionary<char, Cell>();
+        internal IDictionary<char, Cell> cells = new JCG.SortedDictionary<char, Cell>();
         internal int uniformCnt = 0;
         internal int uniformSkip = 0;
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ContentItemsSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ContentItemsSource.cs
index 84a1d7c..f6e7de5 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ContentItemsSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ContentItemsSource.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Benchmarks.ByTask.Utils;
+using J2N.Text;
+using Lucene.Net.Benchmarks.ByTask.Utils;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
index 2ee6184..567dae6 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
@@ -1,12 +1,13 @@
 // LUCENENET TODO: Use HTML Agility pack instead of SAX ?
 
-using Lucene.Net.Support;
+using J2N.Collections.Generic.Extensions;
 using Sax;
 using Sax.Helpers;
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -204,7 +205,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
             private static ISet<string> CreateElementNameSet(params string[] names)
             {
-                return Collections.UnmodifiableSet(new HashSet<string>(names));
+                return new JCG.HashSet<string>(names).AsReadOnly();
             }
 
             /// <summary>HTML elements that cause a line break (they are block-elements).</summary>
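
    The AsReadOnly() calls introduced in Rule.cs and here replace Collections.UnmodifiableMap/
    UnmodifiableSet with the J2N extension methods. A small sketch of the pattern, assuming the
    read-only wrapper rejects mutation the way the Java unmodifiable views do:

        using System;
        using J2N.Collections.Generic.Extensions;
        using JCG = J2N.Collections.Generic;

        static class ReadOnlySetSketch
        {
            static void Main()
            {
                var breakElements = new JCG.HashSet<string> { "p", "br", "li" }.AsReadOnly();
                Console.WriteLine(breakElements.Contains("br")); // True
                // breakElements.Add("div"); // expected to throw NotSupportedException on the read-only view
            }
        }
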
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs
index a939c1b..cc5f57c 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Benchmarks.ByTask.Utils;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Benchmarks.ByTask.Utils;
 using Lucene.Net.Support.IO;
 using System;
 using System.Collections;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs
index 8829b75..219beb6 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Benchmarks.ByTask.Tasks;
+using J2N.Text;
+using Lucene.Net.Benchmarks.ByTask.Tasks;
 using Lucene.Net.Benchmarks.ByTask.Utils;
 using Lucene.Net.Support;
 using System;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
index d34f235..324cf48 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Benchmarks.ByTask.Utils;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Benchmarks.ByTask.Utils;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
@@ -269,7 +269,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 docBuf.Length = 0;
                 Read(docBuf, DOCNO, true, false);
                 name = docBuf.ToString(DOCNO.Length, docBuf.IndexOf(TERMINATING_DOCNO,
-                    DOCNO.Length) - DOCNO.Length).Trim();
+                    DOCNO.Length, StringComparison.Ordinal) - DOCNO.Length).Trim();
 
                 if (!excludeDocnameIteration)
                 {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs
index 1cf6e00..7f42706 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -131,18 +131,18 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         /// <returns></returns>
         public static string Extract(StringBuilder buf, string startTag, string endTag, int maxPos, string[] noisePrefixes)
         {
-            int k1 = buf.IndexOf(startTag);
+            int k1 = buf.IndexOf(startTag, StringComparison.Ordinal);
             if (k1 >= 0 && (maxPos < 0 || k1 < maxPos))
             {
                 k1 += startTag.Length;
-                int k2 = buf.IndexOf(endTag, k1);
+                int k2 = buf.IndexOf(endTag, k1, StringComparison.Ordinal);
                 if (k2 >= 0 && (maxPos < 0 || k2 < maxPos))
                 { // found end tag with allowed range
                     if (noisePrefixes != null)
                     {
                         foreach (string noise in noisePrefixes)
                         {
-                            int k1a = buf.IndexOf(noise, k1);
+                            int k1a = buf.IndexOf(noise, k1, StringComparison.Ordinal);
                             if (k1a >= 0 && k1a < k2)
                             {
                                 k1 = k1a + noise.Length;
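
    The IndexOf changes in this parser (and in the TrecFBIS/FR94/Gov2 parsers below) move to the
    J2N.Text StringBuilder extensions, which take an explicit StringComparison; the commit
    consistently passes StringComparison.Ordinal. A short sketch under that assumption (the sample
    markup is made up):

        using System;
        using System.Text;
        using J2N.Text; // StringBuilder IndexOf(string, [int,] StringComparison) extensions used in this commit

        static class OrdinalIndexOfSketch
        {
            static void Main()
            {
                var buf = new StringBuilder("<DOC><DOCNO> FBIS-1 </DOCNO>");
                int k1 = buf.IndexOf("<DOCNO>", StringComparison.Ordinal);
                int k2 = buf.IndexOf("</DOCNO>", k1, StringComparison.Ordinal);
                Console.WriteLine($"{k1}..{k2}"); // 5..20
            }
        }
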
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs
index cf321cc..530ccda 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Text;
 
@@ -43,10 +43,10 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                           // optionally skip some of the text, set date, title
             DateTime? date = null;
             string title = null;
-            int h1 = docBuf.IndexOf(HEADER);
+            int h1 = docBuf.IndexOf(HEADER, StringComparison.Ordinal);
             if (h1 >= 0)
             {
-                int h2 = docBuf.IndexOf(HEADER_END, h1);
+                int h2 = docBuf.IndexOf(HEADER_END, h1, StringComparison.Ordinal);
                 mark = h2 + HEADER_END_LENGTH;
                 // date...
                 string dateStr = Extract(docBuf, DATE1, DATE1_END, h2, null);
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs
index 72f99bb..d4a022d 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Text;
 
@@ -46,10 +46,10 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             int mark = 0; // that much is skipped
                           // optionally skip some of the text, set date (no title?)
             DateTime? date = null;
-            int h1 = docBuf.IndexOf(TEXT);
+            int h1 = docBuf.IndexOf(TEXT, StringComparison.Ordinal);
             if (h1 >= 0)
             {
-                int h2 = docBuf.IndexOf(TEXT_END, h1);
+                int h2 = docBuf.IndexOf(TEXT_END, h1, StringComparison.Ordinal);
                 mark = h1 + TEXT_LENGTH;
                 // date...
                 string dateStr = Extract(docBuf, DATE, DATE_END, h2, DATE_NOISE_PREFIXES);
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs
index 12912e9..595b566 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.IO;
 using System.Text;
@@ -39,10 +39,10 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             // skip some of the non-html text, optionally set date
             DateTime? date = null;
             int start = 0;
-            int h1 = docBuf.IndexOf(DOCHDR);
+            int h1 = docBuf.IndexOf(DOCHDR, StringComparison.Ordinal);
             if (h1 >= 0)
             {
-                int h2 = docBuf.IndexOf(TERMINATING_DOCHDR, h1);
+                int h2 = docBuf.IndexOf(TERMINATING_DOCHDR, h1, StringComparison.Ordinal);
                 string dateStr = Extract(docBuf, DATE, DATE_END, h2, null);
                 if (dateStr != null)
                 {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs
index 7ef8705..3cfc3c9 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Index;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Index;
 using System;
 using System.Collections.Generic;
 using System.IO;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs
index 444db69..1e95fa1 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs
@@ -7,6 +7,7 @@ using Lucene.Net.Store;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Support.SystemConsole;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
@@ -328,7 +329,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         protected virtual ICollection<string> GetFieldsToHighlight(Document document)
         {
             IList<IIndexableField> fields = document.Fields;
-            ISet<string> result = new HashSet<string>(/*fields.Count*/);
+            ISet<string> result = new JCG.HashSet<string>(fields.Count);
             foreach (IIndexableField f in fields)
             {
                 result.Add(f.Name);
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs
index 7d5a30c..68901bd 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Benchmarks.ByTask.Stats;
-using Lucene.Net.Support;
 using System.Collections.Generic;
 using Console = Lucene.Net.Support.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -55,7 +55,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         protected virtual Report ReportSumByNameRound(IList<TaskStats> taskStats)
         {
             // aggregate by task name and round
-            LinkedHashMap<string, TaskStats> p2 = new LinkedHashMap<string, TaskStats>();
+            JCG.LinkedDictionary<string, TaskStats> p2 = new JCG.LinkedDictionary<string, TaskStats>();
             int reported = 0;
             foreach (TaskStats stat1 in taskStats)
             {
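
    JCG.LinkedDictionary stands in for LinkedHashMap here and in the other Rep* report tasks,
    JavascriptCompiler, and DrillDownQuery further down; the property the reports rely on is that
    enumeration follows insertion order. A minimal sketch, assuming the J2N type keeps that
    LinkedHashMap semantic (task names below are illustrative):

        using System;
        using JCG = J2N.Collections.Generic;

        static class LinkedDictionarySketch
        {
            static void Main()
            {
                var byName = new JCG.LinkedDictionary<string, int>
                {
                    ["AddDocTask"] = 10,
                    ["CommitTask"] = 1,
                    ["SearchTask"] = 3,
                };
                // Keys come back in insertion order, as with Java's LinkedHashMap,
                // so aggregated report rows keep the order tasks were first seen.
                Console.WriteLine(string.Join(", ", byName.Keys)); // AddDocTask, CommitTask, SearchTask
            }
        }
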
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs
index 2afce47..1817539 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Benchmarks.ByTask.Stats;
-using Lucene.Net.Support;
 using System.Collections.Generic;
 using Console = Lucene.Net.Support.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -56,7 +56,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         {
             // aggregate by task name
             int reported = 0;
-            LinkedHashMap<string, TaskStats> p2 = new LinkedHashMap<string, TaskStats>();
+            JCG.LinkedDictionary<string, TaskStats> p2 = new JCG.LinkedDictionary<string, TaskStats>();
             foreach (TaskStats stat1 in taskStats)
             {
                 if (stat1.Elapsed >= 0)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs
index 2118a0f..1d2dac0 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs
@@ -1,8 +1,8 @@
 using Lucene.Net.Benchmarks.ByTask.Stats;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using Console = Lucene.Net.Support.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -52,7 +52,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         {
             // aggregate by task name and by round
             int reported = 0;
-            LinkedHashMap<string, TaskStats> p2 = new LinkedHashMap<string, TaskStats>();
+            JCG.LinkedDictionary<string, TaskStats> p2 = new JCG.LinkedDictionary<string, TaskStats>();
             foreach (TaskStats stat1 in taskStats)
             {
                 if (stat1.Elapsed >= 0 && stat1.Task.GetName().StartsWith(m_prefix, StringComparison.Ordinal))
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs
index 5e7eea6..02d3077 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs
@@ -1,8 +1,8 @@
 using Lucene.Net.Benchmarks.ByTask.Stats;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using Console = Lucene.Net.Support.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -54,7 +54,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         {
             // aggregate by task name
             int reported = 0;
-            LinkedHashMap<string, TaskStats> p2 = new LinkedHashMap<string, TaskStats>();
+            JCG.LinkedDictionary<string, TaskStats> p2 = new JCG.LinkedDictionary<string, TaskStats>();
             foreach (TaskStats stat1 in taskStats)
             {
                 if (stat1.Elapsed >= 0 && stat1.Task.GetName().StartsWith(m_prefix, StringComparison.Ordinal))
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs
index 0e83471..bd745be 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs
@@ -1,9 +1,9 @@
 using Lucene.Net.Benchmarks.ByTask.Stats;
 using Lucene.Net.Benchmarks.ByTask.Utils;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -136,7 +136,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             return sb.ToString();
         }
 
-        protected virtual Report GenPartialReport(int reported, LinkedHashMap<string, TaskStats> partOfTasks, int totalSize)
+        protected virtual Report GenPartialReport(int reported, JCG.LinkedDictionary<string, TaskStats> partOfTasks, int totalSize)
         {
             string longetOp = LongestOp(partOfTasks.Values);
             bool first = true;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs
index 2d623b1..097b453 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
@@ -7,6 +8,7 @@ using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -63,7 +65,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         protected int m_numToHighlight = int.MaxValue;
         protected bool m_mergeContiguous;
         protected int m_maxFrags = 2;
-        protected ISet<string> m_paramFields = new HashSet<string>();
+        protected ISet<string> m_paramFields = new JCG.HashSet<string>();
         protected Highlighter m_highlighter;
         protected int m_maxDocCharsToAnalyze;
 
@@ -172,7 +174,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                 }
                 else if (splits[i].StartsWith("fields[", StringComparison.Ordinal) == true)
                 {
-                    m_paramFields = new HashSet<string>();
+                    m_paramFields = new JCG.HashSet<string>();
                     int len = "fields[".Length;
                     string fieldNames = splits[i].Substring(len, (splits[i].Length - 1) - len);
                     string[] fieldSplits = fieldNames.Split(';').TrimEnd();
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetLoadFieldSelectorTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetLoadFieldSelectorTask.cs
index 3be86f1..30b9172 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetLoadFieldSelectorTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetLoadFieldSelectorTask.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -67,7 +68,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         public override void SetParams(string @params)
         {
             this.m_params = @params; // cannot just call super.setParams(), b/c its params differ.
-            m_fieldsToLoad = new HashSet<string>();
+            m_fieldsToLoad = new JCG.HashSet<string>();
             for (StringTokenizer tokenizer = new StringTokenizer(@params, ","); tokenizer.MoveNext();)
             {
                 string s = tokenizer.Current;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs
index 0725c65..8b73832 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
@@ -7,6 +8,7 @@ using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -61,7 +63,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         protected int m_numToHighlight = int.MaxValue;
         protected int m_maxFrags = 2;
         protected int m_fragSize = 100;
-        protected ISet<string> m_paramFields = new HashSet<string>();
+        protected ISet<string> m_paramFields = new JCG.HashSet<string>();
         protected FastVectorHighlighter m_highlighter;
 
         public SearchTravRetVectorHighlightTask(PerfRunData runData)
@@ -175,7 +177,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                 }
                 else if (splits[i].StartsWith("fields[", StringComparison.Ordinal) == true)
                 {
-                    m_paramFields = new HashSet<string>();
+                    m_paramFields = new JCG.HashSet<string>();
                     int len = "fields[".Length;
                     string fieldNames = splits[i].Substring(len, (splits[i].Length - 1) - len);
                     string[] fieldSplits = fieldNames.Split(';').TrimEnd();
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithSortTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithSortTask.cs
index 634f048..f1e96dd 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithSortTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithSortTask.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Benchmarks.ByTask.Feeds;
+using J2N.Text;
+using Lucene.Net.Benchmarks.ByTask.Feeds;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using System;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/WriteLineDocTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/WriteLineDocTask.cs
index 5b5885c..98838ef 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/WriteLineDocTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/WriteLineDocTask.cs
@@ -1,8 +1,8 @@
-using Lucene.Net.Benchmarks.ByTask.Feeds;
+using J2N.Text;
+using Lucene.Net.Benchmarks.ByTask.Feeds;
 using Lucene.Net.Benchmarks.ByTask.Utils;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
@@ -10,6 +10,7 @@ using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -129,7 +130,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             else
             {
                 checkSufficientFields = true;
-                HashSet<string> sf = new HashSet<string>(suff.Split(',').TrimEnd());
+                ISet<string> sf = new JCG.HashSet<string>(suff.Split(',').TrimEnd());
                 for (int i = 0; i < fieldsToWrite.Length; i++)
                 {
                     if (sf.Contains(fieldsToWrite[i]))
diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
index 9e311ea..fb1261a 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
@@ -1,4 +1,5 @@
 using J2N.IO;
+using J2N.Text;
 using Lucene.Net.Benchmarks.ByTask.Tasks;
 using Lucene.Net.Support;
 using System;
@@ -8,6 +9,7 @@ using System.IO;
 using System.Linq;
 using System.Reflection;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Utils
 {
@@ -362,7 +364,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
             // it is only necessary for assemblies that are not
             // referenced by the host assembly.
 
-            HashSet<string> result = new HashSet<string>();
+            ISet<string> result = new JCG.HashSet<string>();
             string alts = config.Get("alt.tasks.packages", null);
             string dfltPkg = typeof(PerfTask).GetTypeInfo().Assembly.GetName().Name;
             string[] referencedAssemblies = AssemblyUtils.GetReferencedAssemblies().Select(a => a.GetName().Name).ToArray();
diff --git a/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs b/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs
index def489d4..93a5e53 100644
--- a/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs
+++ b/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs b/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs
index 7fa8389..124b76e 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs
@@ -7,6 +7,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Support.SystemConsole;
 
 namespace Lucene.Net.Benchmarks.Quality.Trec
@@ -73,7 +74,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec
                 // validate topics & judgments match each other
                 judge.ValidateData(qqs, logger);
 
-                ISet<string> fieldSet = new HashSet<string>();
+                ISet<string> fieldSet = new JCG.HashSet<string>();
                 if (fieldSpec.IndexOf('T') >= 0) fieldSet.Add("title");
                 if (fieldSpec.IndexOf('D') >= 0) fieldSet.Add("description");
                 if (fieldSpec.IndexOf('N') >= 0) fieldSet.Add("narrative");
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
index a191dd0..8da68d9 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
@@ -1,9 +1,9 @@
 using J2N.Text;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.Quality.Trec
 {
@@ -112,7 +112,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec
             internal QRelJudgement(string queryID)
             {
                 this.queryID = queryID;
-                relevantDocs = new HashMap<string, string>();
+                relevantDocs = new JCG.Dictionary<string, string>();
             }
 
             public virtual void AddRelevantDoc(string docName)
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs
index 158386f..5063313 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -74,11 +74,11 @@ namespace Lucene.Net.Benchmarks.Quality.Trec
                     IDictionary<string, string> fields = new Dictionary<string, string>();
                     // id
                     sb = Read(reader, "<num>", null, true, false);
-                    int k = sb.IndexOf(":");
+                    int k = sb.IndexOf(":", StringComparison.Ordinal);
                     string id = sb.ToString(k + 1, sb.Length - (k + 1)).Trim();
                     // title
                     sb = Read(reader, "<title>", null, true, false);
-                    k = sb.IndexOf(">");
+                    k = sb.IndexOf(">", StringComparison.Ordinal);
                     string title = sb.ToString(k + 1, sb.Length - (k + 1)).Trim();
                     // description
                     Read(reader, "<desc>", null, false, false);
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
index fc848b5..24e1b8e 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
@@ -48,7 +49,7 @@ namespace Lucene.Net.Codecs.BlockTerms
         // produce DocsEnum on demand
         private readonly PostingsReaderBase postingsReader;
 
-        private readonly IDictionary<string, FieldReader> fields = new SortedDictionary<string, FieldReader>(StringComparer.Ordinal);
+        private readonly IDictionary<string, FieldReader> fields = new JCG.SortedDictionary<string, FieldReader>(StringComparer.Ordinal);
 
         // Reads the terms index
         private TermsIndexReaderBase indexReader;
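
    Several codec readers in this commit pair JCG.SortedDictionary with StringComparer.Ordinal so
    that field names sort by UTF-16 code unit, matching Java's TreeMap<String> rather than the
    culture-sensitive .NET default. A small sketch of the difference the comparer makes:

        using System;
        using JCG = J2N.Collections.Generic;

        static class OrdinalSortSketch
        {
            static void Main()
            {
                var fields = new JCG.SortedDictionary<string, int>(StringComparer.Ordinal)
                {
                    ["body"] = 1,
                    ["Title"] = 2,
                    ["id"] = 3,
                };
                // Ordinal ordering places uppercase before lowercase: Title, body, id
                Console.WriteLine(string.Join(", ", fields.Keys));
            }
        }
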
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
index de61035..905b978 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
@@ -1,6 +1,6 @@
+using J2N.Text;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
index 35b721e..b33b0e8 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
@@ -3,10 +3,8 @@ using Lucene.Net.Store;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Packed;
-using System;
 using System.Collections.Generic;
 using System.Diagnostics;
-using System.Linq;
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
diff --git a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
index 1b3bdda..3df399c 100644
--- a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
@@ -1,12 +1,12 @@
 using Lucene.Net.Index;
 using Lucene.Net.Store;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Bloom
 {
@@ -128,7 +128,7 @@ namespace Lucene.Net.Codecs.Bloom
         {
             private readonly BloomFilteringPostingsFormat outerInstance;
             private readonly FieldsProducer _delegateFieldsProducer;
-            private readonly HashMap<string, FuzzySet> _bloomsByFieldName = new HashMap<string, FuzzySet>();
+            private readonly JCG.Dictionary<string, FuzzySet> _bloomsByFieldName = new JCG.Dictionary<string, FuzzySet>();
 
             public BloomFilteredFieldsProducer(BloomFilteringPostingsFormat outerInstance, SegmentReadState state)
             {
diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
index 084c733..f623855 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
@@ -5,6 +5,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.Diagnostics.CodeAnalysis;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -134,7 +135,7 @@ namespace Lucene.Net.Codecs.Memory
         private sealed class DirectFields : FieldsProducer
         {
             // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
-            private readonly IDictionary<string, DirectField> fields = new SortedDictionary<string, DirectField>(StringComparer.Ordinal);
+            private readonly IDictionary<string, DirectField> fields = new JCG.SortedDictionary<string, DirectField>(StringComparer.Ordinal);
 
             public DirectFields(SegmentReadState state, Fields fields, int minSkipCount, int lowFreqCutoff)
             {
diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs
index 2f68422..97c261a 100644
--- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs
+++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs
@@ -8,6 +8,7 @@ using System;
 using System.Collections;
 using System.Collections.Generic;
 using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -42,7 +43,7 @@ namespace Lucene.Net.Codecs.Memory
         private const int INTERVAL = FSTOrdTermsWriter.SKIP_INTERVAL;
 
         // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
-        private readonly SortedDictionary<string, TermsReader> fields = new SortedDictionary<string, TermsReader>(StringComparer.Ordinal);
+        private readonly IDictionary<string, TermsReader> fields = new JCG.SortedDictionary<string, TermsReader>(StringComparer.Ordinal);
         private readonly PostingsReaderBase postingsReader;
         private int version;
         //static final boolean TEST = false;
diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs
index 4caf087..c19e763 100644
--- a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs
+++ b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs
@@ -5,6 +5,7 @@ using System;
 using System.Collections;
 using System.Collections.Generic;
 using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -59,7 +60,7 @@ namespace Lucene.Net.Codecs.Memory
     public class FSTTermsReader : FieldsProducer
     {
         // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
-        private readonly SortedDictionary<string, TermsReader> fields = new SortedDictionary<string, TermsReader>(StringComparer.Ordinal);
+        private readonly IDictionary<string, TermsReader> fields = new JCG.SortedDictionary<string, TermsReader>(StringComparer.Ordinal);
         private readonly PostingsReaderBase postingsReader;
         //static boolean TEST = false;
         private readonly int version;
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
index 447d7f3..a3b23a4 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
@@ -4,6 +4,7 @@ using System.Collections;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -92,10 +93,10 @@ namespace Lucene.Net.Codecs.Memory
             long gcd = 0;
             bool missing = false;
             // TODO: more efficient?
-            HashSet<long?> uniqueValues = null;
+            ISet<long?> uniqueValues = null;
             if (optimizeStorage)
             {
-                uniqueValues = new HashSet<long?>();
+                uniqueValues = new JCG.HashSet<long?>();
 
                 long count = 0;
                 foreach (var nv in values)
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
index 0786f5d..086258a 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -1034,7 +1035,7 @@ namespace Lucene.Net.Codecs.Memory
             ChecksumIndexInput @in = state.Directory.OpenChecksumInput(fileName, IOContext.READ_ONCE);
 
             // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
-            var fields = new SortedDictionary<string, TermsReader>(StringComparer.Ordinal);
+            var fields = new JCG.SortedDictionary<string, TermsReader>(StringComparer.Ordinal);
 
             try
             {
@@ -1058,14 +1059,14 @@ namespace Lucene.Net.Codecs.Memory
                 @in.Dispose();
             }
 
-            return new FieldsProducerAnonymousInnerClassHelper(this, fields);
+            return new FieldsProducerAnonymousInnerClassHelper(fields);
         }
 
         private class FieldsProducerAnonymousInnerClassHelper : FieldsProducer
         {
-            private readonly SortedDictionary<string, TermsReader> _fields;
+            private readonly IDictionary<string, TermsReader> _fields;
 
-            public FieldsProducerAnonymousInnerClassHelper(MemoryPostingsFormat outerInstance, SortedDictionary<string, TermsReader> fields)
+            public FieldsProducerAnonymousInnerClassHelper(IDictionary<string, TermsReader> fields)
             {
                 _fields = fields;
             }
diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
index 38cbc19..c57002b 100644
--- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
+++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Runtime.CompilerServices;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -6,6 +7,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Diagnostics.CodeAnalysis;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Pulsing
 {
@@ -41,7 +43,7 @@ namespace Lucene.Net.Codecs.Pulsing
         private readonly SegmentReadState _segmentState;
         private int _maxPositions;
         private int _version;
-        private SortedDictionary<int, int> _fields;
+        private IDictionary<int, int> _fields;
 
         public PulsingPostingsReader(SegmentReadState state, PostingsReaderBase wrappedPostingsReader)
         {
@@ -64,7 +66,7 @@ namespace Lucene.Net.Codecs.Pulsing
             }
             else
             {
-                _fields = new SortedDictionary<int, int>();
+                _fields = new JCG.SortedDictionary<int, int>();
                 var summaryFileName = IndexFileNames.SegmentFileName(_segmentState.SegmentInfo.Name,
                     _segmentState.SegmentSuffix, PulsingPostingsWriter.SUMMARY_EXTENSION);
                 IndexInput input = null;
@@ -705,7 +707,7 @@ namespace Lucene.Net.Codecs.Pulsing
             // you don't reuse? and maybe pulsingPostingsReader should throw an exc if it wraps
             // another pulsing, because this is just stupid and wasteful. 
             // we still have to be careful in case someone does Pulsing(Stomping(Pulsing(...
-            private readonly IDictionary<PulsingPostingsReader, DocsEnum> _enums = new IdentityHashMap<PulsingPostingsReader, DocsEnum>();
+            private readonly IDictionary<PulsingPostingsReader, DocsEnum> _enums = new JCG.Dictionary<PulsingPostingsReader, DocsEnum>(IdentityEqualityComparer<PulsingPostingsReader>.Default);
 
             public IDictionary<PulsingPostingsReader, DocsEnum> Enums
             {
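
    The IdentityHashMap replacement above keys the enum reuse map by reference identity, via J2N's
    IdentityEqualityComparer passed into a regular JCG.Dictionary. A minimal sketch of what that
    comparer changes (the Key class is illustrative only):

        using System;
        using J2N.Runtime.CompilerServices;
        using JCG = J2N.Collections.Generic;

        static class IdentityComparerSketch
        {
            private sealed class Key
            {
                public override bool Equals(object obj) => true; // pathologically "equal" to everything
                public override int GetHashCode() => 0;
            }

            static void Main()
            {
                var byIdentity = new JCG.Dictionary<Key, string>(IdentityEqualityComparer<Key>.Default);
                var a = new Key();
                var b = new Key();
                byIdentity[a] = "first";
                byIdentity[b] = "second"; // separate entry: keys are compared by reference, not by Equals
                Console.WriteLine(byIdentity.Count); // 2
            }
        }
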
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
index fe493c6..2a79b76 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
index fdb30ea..bc0c966 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
@@ -4,6 +4,7 @@ using System.Diagnostics;
 using System.Globalization;
 using System.Numerics;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.SimpleText
 {
@@ -53,7 +54,7 @@ namespace Lucene.Net.Codecs.SimpleText
         private IndexOutput data;
         private readonly BytesRef scratch = new BytesRef();
         private readonly int numDocs;
-        private readonly HashSet<string> _fieldsSeen = new HashSet<string>(); // for asserting
+        private readonly ISet<string> _fieldsSeen = new JCG.HashSet<string>(); // for asserting
 
         // LUCENENET NOTE: Changed from public to internal because the class had to be made public, but is not for public use.
         internal SimpleTextDocValuesWriter(SegmentWriteState state, string ext)
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
index a2de3ee..4a7c413 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
@@ -1,11 +1,11 @@
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.SimpleText
 {
@@ -52,7 +52,7 @@ namespace Lucene.Net.Codecs.SimpleText
 
     internal class SimpleTextFieldsReader : FieldsProducer
     {
-        private readonly SortedDictionary<string, long?> _fields;
+        private readonly IDictionary<string, long?> _fields;
         private readonly IndexInput _input;
         private readonly FieldInfos _fieldInfos;
         private readonly int _maxDoc;
@@ -81,13 +81,13 @@ namespace Lucene.Net.Codecs.SimpleText
             }
         }
 
-        private SortedDictionary<string, long?> ReadFields(IndexInput @in)
+        private IDictionary<string, long?> ReadFields(IndexInput @in)
         {
             ChecksumIndexInput input = new BufferedChecksumIndexInput(@in);
             var scratch = new BytesRef(10);
 
             // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
-            var fields = new SortedDictionary<string, long?>(StringComparer.Ordinal);
+            var fields = new JCG.SortedDictionary<string, long?>(StringComparer.Ordinal);
 
             while (true)
             {
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs
index 2cb7884..aa44774 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.Globalization;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.SimpleText
 {
@@ -82,7 +83,7 @@ namespace Lucene.Net.Codecs.SimpleText
                 SimpleTextUtil.ReadLine(input, scratch);
                 Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES));
                 int numFiles = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_FILES.Length, scratch), CultureInfo.InvariantCulture);
-                var files = new HashSet<string>();
+                var files = new JCG.HashSet<string>();
 
                 for (int i = 0; i < numFiles; i++)
                 {
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
index a3018de..4758b97 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
@@ -5,6 +5,7 @@ using System.Diagnostics;
 using System.Globalization;
 using System.Linq;
 using System.Reflection;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.SimpleText
 {
@@ -115,7 +116,7 @@ namespace Lucene.Net.Codecs.SimpleText
         public override Fields Get(int doc)
         {
             // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
-            var fields = new SortedDictionary<string, SimpleTVTerms>(StringComparer.Ordinal);
+            var fields = new JCG.SortedDictionary<string, SimpleTVTerms>(StringComparer.Ordinal);
 
             _input.Seek(_offsets[doc]);
             ReadLine();
@@ -304,7 +305,7 @@ namespace Lucene.Net.Codecs.SimpleText
 
         private class SimpleTVTerms : Terms
         {
-            internal readonly SortedDictionary<BytesRef, SimpleTVPostings> terms;
+            internal readonly JCG.SortedDictionary<BytesRef, SimpleTVPostings> terms;
             private readonly bool _hasOffsetsRenamed;
             private readonly bool _hasPositionsRenamed;
             private readonly bool _hasPayloadsRenamed;
@@ -314,7 +315,7 @@ namespace Lucene.Net.Codecs.SimpleText
                 _hasOffsetsRenamed = hasOffsets;
                 _hasPositionsRenamed = hasPositions;
                 _hasPayloadsRenamed = hasPayloads;
-                terms = new SortedDictionary<BytesRef, SimpleTVPostings>();
+                terms = new JCG.SortedDictionary<BytesRef, SimpleTVPostings>();
             }
 
             public override TermsEnum GetIterator(TermsEnum reuse)
@@ -380,11 +381,11 @@ namespace Lucene.Net.Codecs.SimpleText
 
         private class SimpleTVTermsEnum : TermsEnum
         {
-            private readonly SortedDictionary<BytesRef, SimpleTVPostings> _terms;
+            private readonly JCG.SortedDictionary<BytesRef, SimpleTVPostings> _terms;
             private IEnumerator<KeyValuePair<BytesRef, SimpleTVPostings>> _iterator;
             private KeyValuePair<BytesRef, SimpleTVPostings> _current;
 
-            internal SimpleTVTermsEnum(SortedDictionary<BytesRef, SimpleTVPostings> terms)
+            internal SimpleTVTermsEnum(JCG.SortedDictionary<BytesRef, SimpleTVPostings> terms)
             {
                 _terms = terms;
                 _iterator = terms.GetEnumerator();
@@ -392,7 +393,7 @@ namespace Lucene.Net.Codecs.SimpleText
 
             public override SeekStatus SeekCeil(BytesRef text)
             {
-                var newTerms = new SortedDictionary<BytesRef, SimpleTVPostings>(_terms.Comparer);
+                var newTerms = new JCG.SortedDictionary<BytesRef, SimpleTVPostings>(_terms.Comparer);
                 foreach (var p in _terms.Where(p => p.Key.CompareTo(text) >= 0))
                     newTerms.Add(p.Key, p.Value);
 
diff --git a/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs b/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
index de47309..36f0bd3 100644
--- a/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
+++ b/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
@@ -1,3 +1,5 @@
+using J2N.Collections.Generic.Extensions;
+using J2N.Text;
 using Antlr.Runtime;
 using Antlr.Runtime.Tree;
 using Lucene.Net.Queries.Function;
@@ -8,6 +10,7 @@ using System.Globalization;
 using System.Linq;
 using System.Reflection;
 using System.Reflection.Emit;
+using JCG = J2N.Collections.Generic;
 
 #if NETSTANDARD
 using System.IO;
@@ -91,7 +94,7 @@ namespace Lucene.Net.Expressions.JS
 
         private readonly string sourceText;
 
-        private readonly IDictionary<string, int> externalsMap = new LinkedHashMap<string, int>();
+        private readonly IDictionary<string, int> externalsMap = new JCG.LinkedDictionary<string, int>();
 
         private TypeBuilder dynamicType;
 
@@ -636,7 +639,7 @@ namespace Lucene.Net.Expressions.JS
             {
                 throw new Exception("Cannot resolve function", e);
             }
-            return Collections.UnmodifiableMap(map);
+            return map.AsReadOnly();
         }
 
         private static Type GetType(string typeName)
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index 27a8cd2..1d3ae7c 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -4,6 +4,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
 using System.Runtime.CompilerServices;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
@@ -61,7 +62,7 @@ namespace Lucene.Net.Facet
 
         private readonly FacetsConfig config;
         private readonly BooleanQuery query;
-        private readonly IDictionary<string, int?> drillDownDims = new LinkedHashMap<string, int?>();
+        private readonly IDictionary<string, int?> drillDownDims = new JCG.LinkedDictionary<string, int?>();
 
         /// <summary>
         /// Used by <see cref="Clone"/>
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 1030b73..35abba9 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -5,6 +5,7 @@ using System.Diagnostics;
 using System.Linq;
 using System.Text;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
@@ -239,7 +240,7 @@ namespace Lucene.Net.Facet
         /// </summary>
         public virtual IDictionary<string, DimConfig> DimConfigs => fieldTypes;
 
-        private static void CheckSeen(HashSet<string> seenDims, string dim)
+        private static void CheckSeen(ISet<string> seenDims, string dim)
         {
             if (seenDims.Contains(dim))
             {
@@ -282,7 +283,7 @@ namespace Lucene.Net.Facet
             // ... and also all AssociationFacetFields
             IDictionary<string, IList<AssociationFacetField>> assocByField = new Dictionary<string, IList<AssociationFacetField>>();
 
-            var seenDims = new HashSet<string>();
+            var seenDims = new JCG.HashSet<string>();
 
             foreach (IIndexableField field in doc.Fields)
             {
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index a183e00..ef97760 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -7,21 +7,21 @@ using System.Collections.Generic;
 namespace Lucene.Net.Facet.SortedSet
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Default implementation of <see cref="SortedSetDocValuesFacetCounts"/>
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index bd385f0..1f4fd5e 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -1,7 +1,6 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Collections.Generic;
-using System.Linq;
 
 namespace Lucene.Net.Facet.SortedSet
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index e3eb3be..92909f7 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -1,6 +1,5 @@
 using Lucene.Net.Support;
 using System;
-using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using System.Runtime.CompilerServices;
 using System.Threading;
@@ -24,11 +23,11 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using IAccountable = Lucene.Net.Util.IAccountable;
     using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
+    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
+    using IAccountable = Lucene.Net.Util.IAccountable;
     using Int32sRef = Lucene.Net.Util.Int32sRef;
     using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
 
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 549cef6..6bc4785 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics;
 using System.Diagnostics.CodeAnalysis;
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index 439c105..3712fd3 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -1,14 +1,13 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using J2N.Threading.Atomic;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
-using J2N.Threading.Atomic;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
-using System.Linq;
 using System.IO;
 using System.Reflection;
 
@@ -33,11 +32,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
     using AtomicReader = Lucene.Net.Index.AtomicReader;
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException; // javadocs
     using BytesRef = Lucene.Net.Util.BytesRef;
     using Cl2oTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.Cl2oTaxonomyWriterCache;
-    using Directory = Lucene.Net.Store.Directory;
     using CorruptIndexException = Lucene.Net.Index.CorruptIndexException; // javadocs
+    using Directory = Lucene.Net.Store.Directory;
     using DirectoryReader = Lucene.Net.Index.DirectoryReader;
     using DocsEnum = Lucene.Net.Index.DocsEnum;
     using Document = Lucene.Net.Documents.Document;
@@ -45,12 +43,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     using FieldType = Lucene.Net.Documents.FieldType;
     using IndexWriter = Lucene.Net.Index.IndexWriter;
     using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using ITaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.ITaxonomyWriterCache;
+    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException; // javadocs
     using LogByteSizeMergePolicy = Lucene.Net.Index.LogByteSizeMergePolicy;
     using OpenMode = Lucene.Net.Index.OpenMode;
     using ReaderManager = Lucene.Net.Index.ReaderManager;
     using SegmentInfos = Lucene.Net.Index.SegmentInfos;
     using StringField = Lucene.Net.Documents.StringField;
-    using ITaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.ITaxonomyWriterCache;
     using Terms = Lucene.Net.Index.Terms;
     using TermsEnum = Lucene.Net.Index.TermsEnum;
     using TextField = Lucene.Net.Documents.TextField;
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index 0a48733..1cd4b17 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics;
 using System.Diagnostics.CodeAnalysis;
@@ -170,7 +171,8 @@ namespace Lucene.Net.Facet.Taxonomy
             int hash = Length;
             for (int i = 0; i < Length; i++)
             {
-                hash = hash * 31 + Components[i].GetHashCode();
+                // LUCENENET specific: Use CharSequenceComparer to get the same value as StringCharSequence.GetHashCode()
+                hash = hash * 31 + CharSequenceComparer.Ordinal.GetHashCode(Components[i]);
             }
             return hash;
         }
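
    The hunk above routes each component's hash through J2N's CharSequenceComparer.Ordinal so that FacetLabel.GetHashCode() agrees with what a J2N StringCharSequence would produce for the same text. Below is a minimal sketch of that pattern, not taken from the patch; it assumes only the CharSequenceComparer.Ordinal.GetHashCode(string) overload that the hunk itself calls. A practical side note: on newer .NET runtimes string.GetHashCode() is randomized per process, so funnelling every char-sequence flavor through one comparer is also what keeps the different wrappers mutually consistent.

        using System;
        using J2N.Text;

        internal static class LabelHashSketch
        {
            // Illustrative only: combines component hashes the same way the
            // FacetLabel.GetHashCode() hunk above does, going through
            // CharSequenceComparer.Ordinal so the result matches J2N's
            // char-sequence wrappers.
            public static int HashComponents(params string[] components)
            {
                int hash = components.Length;
                foreach (string component in components)
                {
                    hash = hash * 31 + CharSequenceComparer.Ordinal.GetHashCode(component);
                }
                return hash;
            }

            public static void Main()
            {
                Console.WriteLine(HashComponents("Author", "Shad Storhaug"));
            }
        }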
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index 548b3b7..374f8a6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -4,21 +4,21 @@ using System.Globalization;
 namespace Lucene.Net.Facet.Taxonomy
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     using BytesRef = Lucene.Net.Util.BytesRef;
     using Document = Lucene.Net.Documents.Document;
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index bd449b9..3a673be 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <para/>
         /// Setting <paramref name="limit"/> to a very large value, like <see cref="int.MaxValue"/>
         /// is allowed, but is less efficient than
-        /// using <see cref="Support.HashMap{TKey, TValue}"/> or 
+        /// using <see cref="J2N.Collections.Generic.Dictionary{TKey, TValue}"/> or 
         /// <see cref="Dictionary{TKey, TValue}"/> because our class needs
         /// to keep track of the use order (via an additional doubly-linked
         /// list) which is not used when the map's size is always below the
@@ -76,7 +76,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <para/>
         /// Setting <paramref name="limit"/> to a very large value, like <see cref="int.MaxValue"/>
         /// is allowed, but is less efficient than
-        /// using <see cref="Support.HashMap{TKey, TValue}"/> or 
+        /// using <see cref="J2N.Collections.Generic.Dictionary{TKey, TValue}"/> or 
         /// <see cref="Dictionary{TKey, TValue}"/> because our class needs
         /// to keep track of the use order (via an additional doubly-linked
         /// list) which is not used when the map's size is always below the
@@ -104,10 +104,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         public virtual int Limit
         {
-            get
-            {
-                return cache.Limit;
-            }
+            get => cache.Limit;
             set
             {
                 if (value < 1)
@@ -131,8 +128,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         public TValue Get(TKey key)
         {
-            TValue result;
-            if (!cache.TryGetValue(key, out result))
+            if (!cache.TryGetValue(key, out TValue result))
             {
                 return default(TValue);
             }
@@ -143,51 +139,21 @@ namespace Lucene.Net.Facet.Taxonomy
 
         public TValue this[TKey key]
         {
-            get
-            {
-                return cache[key];
-            }
-            set
-            {
-                cache[key] = value;
-            }
+            get => cache[key];
+            set => cache[key] = value;
         }
 
-        public int Count
-        {
-            get
-            {
-                return cache.Count;
-            }
-        }
+        public int Count => cache.Count;
 
-        public bool IsReadOnly
-        {
-            get
-            {
-                return false;
-            }
-        }
+        public bool IsReadOnly => false;
 
-        public ICollection<TKey> Keys
-        {
-            get
-            {
-                return cache.Keys;
-            }
-        }
+        public ICollection<TKey> Keys => cache.Keys;
 
-        public ICollection<TValue> Values
-        {
-            get
-            {
-                return cache.Values;
-            }
-        }
+        public ICollection<TValue> Values => cache.Values;
 
         public void Add(KeyValuePair<TKey, TValue> item)
         {
-            throw new NotSupportedException();
+            cache.Add(item.Key, item.Value);
         }
 
         public void Add(TKey key, TValue value)
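
    The LRUHashMap changes above are mostly syntax tightening (expression-bodied members, out variables) plus making Add(KeyValuePair) delegate to the cache instead of throwing, but the XML docs describe the underlying design: a dictionary for lookups plus a doubly-linked list that records use order so the least-recently-used entry can be evicted once the limit is reached. A toy sketch of that idea follows; the type and member names are invented for illustration and this is not the Lucene.NET implementation.

        using System;
        using System.Collections.Generic;

        // Minimal LRU cache: Dictionary for O(1) lookup, LinkedList for use order.
        internal sealed class TinyLruCache<TKey, TValue>
        {
            private readonly int limit;
            private readonly Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>> map;
            private readonly LinkedList<KeyValuePair<TKey, TValue>> useOrder = new LinkedList<KeyValuePair<TKey, TValue>>();

            public TinyLruCache(int limit)
            {
                if (limit < 1) throw new ArgumentOutOfRangeException(nameof(limit));
                this.limit = limit;
                this.map = new Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>>(limit);
            }

            public void Put(TKey key, TValue value)
            {
                if (map.TryGetValue(key, out var node))
                {
                    useOrder.Remove(node);   // re-insert refreshes the position
                    map.Remove(key);
                }
                else if (map.Count == limit)
                {
                    var lru = useOrder.Last; // least recently used entry
                    useOrder.RemoveLast();
                    map.Remove(lru.Value.Key);
                }
                var fresh = useOrder.AddFirst(new KeyValuePair<TKey, TValue>(key, value));
                map[key] = fresh;
            }

            public bool TryGet(TKey key, out TValue value)
            {
                if (map.TryGetValue(key, out var node))
                {
                    useOrder.Remove(node);   // promote to most recently used
                    useOrder.AddFirst(node);
                    value = node.Value.Value;
                    return true;
                }
                value = default(TValue);
                return false;
            }
        }

    Because every read and write also touches the linked list, a cache like this does more work per operation than a plain dictionary, which is roughly the trade-off the doc comments above warn about when the limit is set so high that eviction never happens.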
diff --git a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
index 6dc81c9..01e3bd3 100644
--- a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
@@ -4,21 +4,21 @@ using System.Diagnostics.CodeAnalysis;
 namespace Lucene.Net.Facet.Taxonomy
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Returns 3 arrays for traversing the taxonomy:
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index ac71f7a..e5eabcf 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -1,7 +1,6 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System;
 using System.Collections.Generic;
-using System.Linq;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
index 2be7eda..4a2a00f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
@@ -57,7 +57,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             for (int i = 0; i < length; i++)
             {
                 int len = charBlockArray[offset++];
-                hash = hash * 31 + charBlockArray.SubSequence(offset, offset + len).GetHashCode();
+                hash = hash * 31 + charBlockArray.Subsequence(offset, len).GetHashCode(); // LUCENENET: Corrected 2nd Subsequence parameter
                 offset += len;
             }
             return hash;
@@ -87,7 +87,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                     return false;
                 }
 
-                if (!cp.Components[i].Equals(charBlockArray.SubSequence(offset, offset + len), StringComparison.Ordinal))
+                if (!cp.Components[i].Equals(charBlockArray.Subsequence(offset, len).ToString(), StringComparison.Ordinal)) // LUCENENET: Corrected 2nd Subsequence parameter
                 {
                     return false;
                 }
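
    The two "Corrected 2nd Subsequence parameter" hunks above fix a classic porting slip: Java's CharSequence.subSequence(start, end) takes an exclusive end index, while J2N's ICharSequence.Subsequence(startIndex, length), like most .NET slicing APIs, takes a length. The sketch below uses string.Substring (which shares the startIndex/length convention) purely to illustrate why passing the Java-style end index as a length reads the wrong span; the names are illustrative and not from the patch.

        using System;

        internal static class SubsequenceConventionSketch
        {
            public static void Main()
            {
                const string labelRepository = "abcdefghij";
                const int offset = 2;
                const int len = 4;

                // Correct translation of Java's subSequence(offset, offset + len)
                // into a (startIndex, length) API: pass the length.
                Console.WriteLine(labelRepository.Substring(offset, len));          // cdef

                // The pre-fix mistake: reusing the Java end index (offset + len) as
                // a length grabs too many characters, and throws once it runs past
                // the end of the underlying buffer.
                Console.WriteLine(labelRepository.Substring(offset, offset + len));  // cdefgh
            }
        }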
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index 98ae751..50fbd38 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using Lucene.Net.Support.IO;
 using System;
 using System.Collections.Generic;
@@ -64,6 +64,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 return clone;
             }
 
+            
+
             // LUCENENET specific
             public void Serialize(Stream writer)
             {
@@ -224,20 +226,18 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
         }
 
-        public virtual int Length
-        {
-            get
-            {
-                return this.length;
-            }
-        }
+        public virtual int Length => this.length;
+
 
-        public virtual string SubSequence(int start, int end)
+        // LUCENENET specific
+        bool ICharSequence.HasValue => true;
+
+        public virtual ICharSequence Subsequence(int startIndex, int length)
         {
-            int remaining = end - start;
+            int remaining = length;
             StringBuilder sb = new StringBuilder(remaining);
-            int blockIdx = BlockIndex(start);
-            int indexInBlock = IndexInBlock(start);
+            int blockIdx = BlockIndex(startIndex);
+            int indexInBlock = IndexInBlock(startIndex);
             while (remaining > 0)
             {
                 Block b = blocks[blockIdx++];
@@ -246,12 +246,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 remaining -= numToAppend;
                 indexInBlock = 0; // 2nd+ iterations read from start of the block
             }
-            return sb.ToString();
+            return new StringBuilderCharSequence(sb);
         }
 
-        ICharSequence ICharSequence.SubSequence(int start, int end)
+        ICharSequence ICharSequence.Subsequence(int startIndex, int length)
         {
-            return new StringCharSequenceWrapper(this.SubSequence(start, end));
+            return Subsequence(startIndex, length);
         }
 
         public override string ToString()
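
    With this change CharBlockArray implements J2N's ICharSequence directly: Subsequence(startIndex, length) now returns a StringBuilderCharSequence wrapping the builder it just filled, and the interface's HasValue member (added here as a LUCENENET-specific explicit implementation) reports that an underlying value exists. A small consumer-side sketch follows; it assumes the interface mirrors Java's CharSequence surface (Length, a char indexer, ToString), which is how the rest of this patch uses it.

        using System;
        using System.Text;
        using J2N.Text;

        internal static class CharSequenceSketch
        {
            public static void Main()
            {
                // Wrap a StringBuilder the same way the Subsequence() hunk above does.
                var sb = new StringBuilder("facet/label");
                ICharSequence seq = new StringBuilderCharSequence(sb);

                Console.WriteLine(seq.HasValue);    // expected True: an underlying value is present
                Console.WriteLine(seq.Length);      // 11
                Console.WriteLine(seq[0]);          // f
                Console.WriteLine(seq.ToString());  // facet/label
            }
        }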
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index b71f3aa..955e2c7 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -446,7 +446,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                         for (int i = 0; i < length; i++)
                         {
                             int len = (ushort)l2o.labelRepository[offset++];
-                            hash = hash * 31 + l2o.labelRepository.SubSequence(offset, offset + len).GetHashCode();
+                            hash = hash * 31 + l2o.labelRepository.Subsequence(offset, len).GetHashCode(); // LUCENENET: Corrected 2nd Subsequence parameter
                             offset += len;
                         }
                     }
diff --git a/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
index 8f586ed..22d1b75 100644
--- a/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
@@ -1,5 +1,6 @@
 using Lucene.Net.Index;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
@@ -102,7 +103,7 @@ namespace Lucene.Net.Search.Grouping
             public GroupCount(TGroupValue groupValue)
             {
                 this.GroupValue = groupValue;
-                this.UniqueValues = new HashSet<TGroupValue>();
+                this.UniqueValues = new JCG.HashSet<TGroupValue>();
             }
         }
 
diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
index 126596b..519d131 100644
--- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
@@ -1,28 +1,28 @@
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// FirstPassGroupingCollector is the first of two passes necessary
@@ -48,7 +48,7 @@ namespace Lucene.Net.Search.Grouping
 
         // Set once we reach topNGroups unique groups:
         // @lucene.internal
-        protected SortedSet<CollectedSearchGroup<TGroupValue>> m_orderedGroups;
+        protected JCG.SortedSet<CollectedSearchGroup<TGroupValue>> m_orderedGroups;
         private int docBase;
         private int spareSlot;
 
@@ -92,7 +92,7 @@ namespace Lucene.Net.Search.Grouping
             }
 
             spareSlot = topNGroups;
-            groupMap = new HashMap<TGroupValue, CollectedSearchGroup<TGroupValue>>(topNGroups);
+            groupMap = new JCG.Dictionary<TGroupValue, CollectedSearchGroup<TGroupValue>>(topNGroups);
         }
 
         /// <summary>
@@ -372,7 +372,7 @@ namespace Lucene.Net.Search.Grouping
         private void BuildSortedSet()
         {
             var comparer = new BuildSortedSetComparer(this);
-            m_orderedGroups = new SortedSet<CollectedSearchGroup<TGroupValue>>(comparer);
+            m_orderedGroups = new JCG.SortedSet<CollectedSearchGroup<TGroupValue>>(comparer);
             m_orderedGroups.UnionWith(groupMap.Values);
             Debug.Assert(m_orderedGroups.Count > 0);
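
    BuildSortedSet() above now seeds a JCG.SortedSet with a custom comparer and fills it with UnionWith. The sketch below shows that pattern in isolation with an invented comparer and string keys; it assumes only the members the hunk itself uses (the IComparer<T> constructor, UnionWith, enumeration) plus the capacity constructor of JCG.Dictionary seen earlier in this file.

        using System;
        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        internal static class SortedGroupsSketch
        {
            // Invented comparer standing in for BuildSortedSetComparer:
            // order by length first, then ordinally.
            private sealed class ByLengthThenOrdinal : IComparer<string>
            {
                public int Compare(string x, string y)
                {
                    int cmp = x.Length.CompareTo(y.Length);
                    return cmp != 0 ? cmp : string.CompareOrdinal(x, y);
                }
            }

            public static void Main()
            {
                var groupMap = new JCG.Dictionary<string, int>(3);
                groupMap["lucene"] = 1;
                groupMap["net"] = 2;
                groupMap["j2n"] = 3;

                // Same shape as BuildSortedSet(): create the ordered view with a
                // comparer, then union in everything collected so far.
                var orderedGroups = new JCG.SortedSet<string>(new ByLengthThenOrdinal());
                orderedGroups.UnionWith(groupMap.Keys);

                foreach (string group in orderedGroups)
                {
                    Console.WriteLine(group);   // j2n, net, lucene
                }
            }
        }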
 
diff --git a/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs b/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
index ad2c0c1..e887697 100644
--- a/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
@@ -1,26 +1,26 @@
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Base class for computing grouped facets.
@@ -157,7 +157,7 @@ namespace Lucene.Net.Search.Grouping
             private readonly static IComparer<FacetEntry> orderByValue = new OrderByValueComparer();
 
             private readonly int maxSize;
-            private readonly TreeSet<FacetEntry> facetEntries;
+            private readonly JCG.SortedSet<FacetEntry> facetEntries;
             private readonly int totalMissingCount;
             private readonly int totalCount;
 
@@ -165,7 +165,7 @@ namespace Lucene.Net.Search.Grouping
 
             public GroupedFacetResult(int size, int minCount, bool orderByCount, int totalCount, int totalMissingCount)
             {
-                this.facetEntries = new TreeSet<FacetEntry>(orderByCount ? orderByCountAndValue : orderByValue);
+                this.facetEntries = new JCG.SortedSet<FacetEntry>(orderByCount ? orderByCountAndValue : orderByValue);
                 this.totalMissingCount = totalMissingCount;
                 this.totalCount = totalCount;
                 maxSize = size;
@@ -182,18 +182,20 @@ namespace Lucene.Net.Search.Grouping
                 FacetEntry facetEntry = new FacetEntry(facetValue, count);
                 if (facetEntries.Count == maxSize)
                 {
-                    FacetEntry temp;
-                    if (!facetEntries.TrySuccessor(facetEntry, out temp))
+                    if (!facetEntries.TryGetSuccessor(facetEntry, out FacetEntry _))
                     {
                         return;
                     }
-                    facetEntries.DeleteMax();
+                    var max = facetEntries.Max;
+                    if (max != null)
+                        facetEntries.Remove(max);
                 }
                 facetEntries.Add(facetEntry);
 
                 if (facetEntries.Count == maxSize)
                 {
-                    currentMin = facetEntries.FindMax().Count;
+                    var max = facetEntries.Max;
+                    currentMin = max != null ? max.Count : 0;
                 }
             }
 
@@ -229,24 +231,12 @@ namespace Lucene.Net.Search.Grouping
             /// <summary>
             /// Gets the sum of all facet entries counts.
             /// </summary>
-            public virtual int TotalCount
-            {
-                get
-                {
-                    return totalCount;
-                }
-            }
+            public virtual int TotalCount => totalCount;
 
             /// <summary>
             /// Gets the number of groups that didn't have a facet value.
             /// </summary>
-            public virtual int TotalMissingCount
-            {
-                get
-                {
-                    return totalMissingCount;
-                }
-            }
+            public virtual int TotalMissingCount => totalMissingCount;
         }
 
         /// <summary>
@@ -295,24 +285,12 @@ namespace Lucene.Net.Search.Grouping
             /// <summary>
             /// Gets the value of this facet entry
             /// </summary>
-            public virtual BytesRef Value
-            {
-                get
-                {
-                    return value;
-                }
-            }
+            public virtual BytesRef Value => value;
 
             /// <summary>
             /// Gets the count (number of groups) of this facet entry.
             /// </summary>
-            public virtual int Count
-            {
-                get
-                {
-                    return count;
-                }
-            }
+            public virtual int Count => count;
         }
 
         /// <summary>
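
    The AddFacetCount() hunk above replaces C5's TreeSet calls (TrySuccessor, DeleteMax, FindMax) with their JCG.SortedSet counterparts (TryGetSuccessor, Max plus Remove) while keeping the same bounded-set discipline: once the set holds maxSize entries, the largest one is evicted to make room. A stripped-down sketch of that discipline follows, using only members the patch itself calls (Add, Remove, Max, Count) on plain ints instead of FacetEntry objects.

        using System;
        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        internal static class BoundedSortedSetSketch
        {
            public static void Main()
            {
                const int maxSize = 3;
                var smallest = new JCG.SortedSet<int>(Comparer<int>.Default);

                foreach (int value in new[] { 42, 7, 19, 3, 25, 11 })
                {
                    smallest.Add(value);
                    if (smallest.Count > maxSize)
                    {
                        smallest.Remove(smallest.Max);  // evict the current maximum
                    }
                }

                Console.WriteLine(string.Join(", ", smallest));  // 3, 7, 11
            }
        }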
diff --git a/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
index 18eb85a..b918e8b 100644
--- a/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
@@ -1,27 +1,27 @@
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// SecondPassGroupingCollector is the second of two passes
@@ -62,7 +62,7 @@ namespace Lucene.Net.Search.Grouping
             this.withinGroupSort = withinGroupSort;
             this.groups = groups;
             this.maxDocsPerGroup = maxDocsPerGroup;
-            m_groupMap = new HashMap<TGroupValue, AbstractSecondPassGroupingCollector.SearchGroupDocs<TGroupValue>>(groups.Count());
+            m_groupMap = new JCG.Dictionary<TGroupValue, AbstractSecondPassGroupingCollector.SearchGroupDocs<TGroupValue>>(groups.Count());
 
             foreach (SearchGroup<TGroupValue> group in groups)
             {
diff --git a/src/Lucene.Net.Grouping/Function/FunctionAllGroupsCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionAllGroupsCollector.cs
index 44f4cad..b4ca8fb 100644
--- a/src/Lucene.Net.Grouping/Function/FunctionAllGroupsCollector.cs
+++ b/src/Lucene.Net.Grouping/Function/FunctionAllGroupsCollector.cs
@@ -3,25 +3,26 @@ using Lucene.Net.Queries.Function;
 using Lucene.Net.Util.Mutable;
 using System.Collections;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping.Function
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A collector that collects all groups that match the
@@ -39,7 +40,7 @@ namespace Lucene.Net.Search.Grouping.Function
     {
         private readonly IDictionary /* Map<?, ?> */ vsContext;
         private readonly ValueSource groupBy;
-        private readonly SortedSet<MutableValue> groups = new SortedSet<MutableValue>();
+        private readonly ISet<MutableValue> groups = new JCG.SortedSet<MutableValue>();
 
         private FunctionValues.ValueFiller filler;
         private MutableValue mval;
@@ -55,13 +56,7 @@ namespace Lucene.Net.Search.Grouping.Function
             this.groupBy = groupBy;
         }
 
-        public override IEnumerable<MutableValue> Groups
-        {
-            get
-            {
-                return groups;
-            }
-        }
+        public override IEnumerable<MutableValue> Groups => groups;
 
         public override void Collect(int doc)
         {
diff --git a/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs
index 3e17ce5..d028645 100644
--- a/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs
@@ -1,9 +1,9 @@
 using Lucene.Net.Index;
 using Lucene.Net.Queries.Function;
-using Lucene.Net.Support;
 using Lucene.Net.Util.Mutable;
 using System.Collections;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping.Function
 {
@@ -46,7 +46,7 @@ namespace Lucene.Net.Search.Grouping.Function
             this.vsContext = vsContext;
             this.groupSource = groupSource;
             this.countSource = countSource;
-            groupMap = new LinkedHashMap<MutableValue, GroupCount>();
+            groupMap = new JCG.LinkedDictionary<MutableValue, GroupCount>();
             foreach (SearchGroup<MutableValue> group in groups)
             {
                 groupMap[group.GroupValue] = new GroupCount(group.GroupValue);
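
    Here LinkedHashMap gives way to JCG.LinkedDictionary for groupMap. The point of both types, and presumably the reason the original Java code chose LinkedHashMap, is that enumeration follows insertion order rather than key order, so the collector reports groups in the order they were registered. A short sketch of that behavior with invented keys; it assumes LinkedDictionary preserves insertion order the way its Java counterpart does.

        using System;
        using JCG = J2N.Collections.Generic;

        internal static class LinkedDictionarySketch
        {
            public static void Main()
            {
                var groupMap = new JCG.LinkedDictionary<string, int>();
                groupMap["beta"] = 1;
                groupMap["alpha"] = 2;
                groupMap["gamma"] = 3;

                foreach (var entry in groupMap)
                {
                    // Expected order: beta, alpha, gamma (insertion order, not sorted).
                    Console.WriteLine($"{entry.Key} = {entry.Value}");
                }
            }
        }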
diff --git a/src/Lucene.Net.Grouping/GroupDocs.cs b/src/Lucene.Net.Grouping/GroupDocs.cs
index 34c2ccd..5e4892d 100644
--- a/src/Lucene.Net.Grouping/GroupDocs.cs
+++ b/src/Lucene.Net.Grouping/GroupDocs.cs
@@ -4,21 +4,21 @@ using System.Diagnostics.CodeAnalysis;
 namespace Lucene.Net.Search.Grouping
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Represents one group in the results.
diff --git a/src/Lucene.Net.Grouping/SearchGroup.cs b/src/Lucene.Net.Grouping/SearchGroup.cs
index a34e24b..50b40db 100644
--- a/src/Lucene.Net.Grouping/SearchGroup.cs
+++ b/src/Lucene.Net.Grouping/SearchGroup.cs
@@ -5,25 +5,26 @@ using System.Diagnostics;
 using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Reflection;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Represents a group that is found during the first pass search.
@@ -193,9 +194,8 @@ namespace Lucene.Net.Search.Grouping
             // Only for assert
             private bool NeverEquals(object other)
             {
-                if (other is MergedGroup<T>)
+                if (other is MergedGroup<T> otherMergedGroup)
                 {
-                    MergedGroup<T> otherMergedGroup = (MergedGroup<T>)other;
                     if (groupValue == null)
                     {
                         Debug.Assert(otherMergedGroup.groupValue != null);
@@ -204,11 +204,11 @@ namespace Lucene.Net.Search.Grouping
                     {
                         
                         Debug.Assert(!groupValueIsValueType 
-                            ? groupValue.Equals(otherMergedGroup.groupValue)
+                            ? JCG.EqualityComparer<T>.Default.Equals(groupValue, otherMergedGroup.groupValue)
 
-                            // LUCENENET specific - use Collections.Equals() if we have a reference type
+                            // LUCENENET specific - use J2N.Collections.StructuralEqualityComparer.Default.Equals() if we have a reference type
                             // to ensure if it is a collection its contents are compared
-                            : Collections.Equals(groupValue, otherMergedGroup.groupValue));
+                            : J2N.Collections.StructuralEqualityComparer.Default.Equals(groupValue, otherMergedGroup.groupValue));
                     }
                 }
                 return true;
@@ -220,18 +220,19 @@ namespace Lucene.Net.Search.Grouping
                 // same groupValue
                 Debug.Assert(NeverEquals(other));
 
-                if (other is MergedGroup<T>)
+                if (other is MergedGroup<T> otherMergedGroup)
                 {
-                    MergedGroup<T> otherMergedGroup = (MergedGroup<T>)other;
                     if (groupValue == null)
                     {
                         return otherMergedGroup == null;
                     }
                     else
                     {
-                        // LUCENENET specific - use Collections.Equals() if we have a reference type
+                        // LUCENENET specific - use J2N.Collections.StructuralEqualityComparer.Default.Equals() if we have a reference type
                         // to ensure if it is a collection its contents are compared
-                        return groupValueIsValueType ? groupValue.Equals(otherMergedGroup) : Collections.Equals(groupValue, otherMergedGroup);
+                        return groupValueIsValueType ?
+                            JCG.EqualityComparer<T>.Default.Equals(groupValue, otherMergedGroup.groupValue) :
+                            J2N.Collections.StructuralEqualityComparer.Default.Equals(groupValue, otherMergedGroup.groupValue);
                     }
                 }
                 else
@@ -248,9 +249,11 @@ namespace Lucene.Net.Search.Grouping
                 }
                 else
                 {
-                    // LUCENENET specific - use Collections.GetHashCode() if we have a reference type
+                    // LUCENENET specific - use J2N.Collections.StructuralEqualityComparer.Default.GetHashCode() if we have a reference type
                     // to ensure if it is a collection its contents are compared
-                    return groupValueIsValueType ? groupValue.GetHashCode() : Collections.GetHashCode(groupValue);
+                    return groupValueIsValueType ?
+                        JCG.EqualityComparer<T>.Default.GetHashCode(groupValue) :
+                        J2N.Collections.StructuralEqualityComparer.Default.GetHashCode(groupValue);
                 }
             }
         }
@@ -316,14 +319,14 @@ namespace Lucene.Net.Search.Grouping
         {
 
             private readonly GroupComparer<T> groupComp;
-            private readonly TreeSet<MergedGroup<T>> queue;
+            private readonly JCG.SortedSet<MergedGroup<T>> queue;
             private readonly IDictionary<T, MergedGroup<T>> groupsSeen;
 
             public GroupMerger(Sort groupSort)
             {
                 groupComp = new GroupComparer<T>(groupSort);
-                queue = new TreeSet<MergedGroup<T>>(groupComp);
-                groupsSeen = new HashMap<T, MergedGroup<T>>();
+                queue = new JCG.SortedSet<MergedGroup<T>>(groupComp);
+                groupsSeen = new JCG.Dictionary<T, MergedGroup<T>>();
             }
 
             private void UpdateNextGroup(int topN, ShardIter<T> shard)
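
    Throughout SearchGroup.cs the patch splits equality into two paths: JCG.EqualityComparer<T>.Default for value types and J2N.Collections.StructuralEqualityComparer.Default for reference types, so that a group value which happens to be a collection is compared by its contents rather than by reference (and hashes accordingly). The sketch below illustrates the difference on two int arrays; the expected outputs follow from the patch's own comments and are assumptions about the comparer, not output captured from it.

        using System;
        using System.Collections.Generic;
        using J2N.Collections;

        internal static class StructuralEqualitySketch
        {
            public static void Main()
            {
                int[] a = { 1, 2, 3 };
                int[] b = { 1, 2, 3 };

                // Plain reference equality: two distinct array instances differ.
                Console.WriteLine(EqualityComparer<int[]>.Default.Equals(a, b));      // False

                // Structural comparison: contents are compared, so these should
                // match and hash alike, which is what the group-merging code needs.
                Console.WriteLine(StructuralEqualityComparer.Default.Equals(a, b));   // True (expected)
                Console.WriteLine(StructuralEqualityComparer.Default.GetHashCode(a)
                               == StructuralEqualityComparer.Default.GetHashCode(b)); // True (expected)
            }
        }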
diff --git a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
index bd2c079..97c1dbc 100644
--- a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping.Terms
 {
@@ -149,7 +149,7 @@ namespace Lucene.Net.Search.Grouping.Terms
             : base(groupField, sortWithinGroup.GetSort().Length)
         {
             this.sortWithinGroup = sortWithinGroup;
-            groups = new HashMap<BytesRef, GroupHead>();
+            groups = new JCG.Dictionary<BytesRef, GroupHead>();
 
             SortField[] sortFields = sortWithinGroup.GetSort();
             for (int i = 0; i < sortFields.Length; i++)
diff --git a/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
index 3e0fc8f..e3fe93b 100644
--- a/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
@@ -8,21 +8,21 @@ using System.Linq;
 namespace Lucene.Net.Search.Grouping.Terms
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A term based implementation of <see cref="T:AbstractDistinctValuesCollector{TermDistinctValuesCollector.GroupCount}"/> that relies
diff --git a/src/Lucene.Net.Grouping/TopGroups.cs b/src/Lucene.Net.Grouping/TopGroups.cs
index 7a7e522..007f0c9 100644
--- a/src/Lucene.Net.Grouping/TopGroups.cs
+++ b/src/Lucene.Net.Grouping/TopGroups.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+using J2N.Collections;
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics.CodeAnalysis;
 using System.Reflection;
@@ -6,21 +7,21 @@ using System.Reflection;
 namespace Lucene.Net.Search.Grouping
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Represents result returned by a grouping search.
@@ -198,9 +199,9 @@ namespace Lucene.Net.Search.Grouping
                             throw new ArgumentException("group values differ across shards; you must pass same top groups to all shards' second-pass collector");
                         }
                     }
-                    // LUCENENET specific - use Collections.Equals() if we have a reference type
+                    // LUCENENET specific - use StructuralEqualityComparer.Default.Equals() if we have a reference type
                     // to ensure if it is a collection its contents are compared
-                    else if (!(shardGroupsIsValueType ? groupValue.Equals(shardGroupDocs.GroupValue) : Collections.Equals(groupValue, shardGroupDocs.GroupValue)))
+                    else if (!(shardGroupsIsValueType ? groupValue.Equals(shardGroupDocs.GroupValue) : StructuralEqualityComparer.Default.Equals(groupValue, shardGroupDocs.GroupValue)))
                     {
                         throw new ArgumentException("group values differ across shards; you must pass same top groups to all shards' second-pass collector");
                     }
diff --git a/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs b/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs
index 1f41488..5ad4b04 100644
--- a/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs
@@ -1,29 +1,29 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     ///<summary>
     /// <see cref="IScorer"/> implementation which scores text fragments by the number of
@@ -110,7 +110,7 @@ namespace Lucene.Net.Search.Highlight
         /// <param name="weightedTerms">an array of pre-created <see cref="WeightedSpanTerm"/>s</param>
         public QueryScorer(WeightedSpanTerm[] weightedTerms)
         {
-            this.fieldWeightedSpanTerms = new HashMap<string, WeightedSpanTerm>(weightedTerms.Length);
+            this.fieldWeightedSpanTerms = new JCG.Dictionary<string, WeightedSpanTerm>(weightedTerms.Length);
 
             foreach (WeightedSpanTerm t in weightedTerms)
             {
@@ -240,7 +240,7 @@ namespace Lucene.Net.Search.Highlight
         /// <seealso cref="IScorer.StartFragment"/>
         public virtual void StartFragment(TextFragment newFragment)
         {
-            foundTerms = new HashSet<string>();
+            foundTerms = new JCG.HashSet<string>();
             totalScore = 0;
         }
 
diff --git a/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs b/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs
index b19ba22..5716f63 100644
--- a/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs
@@ -1,28 +1,29 @@
-using Lucene.Net.Index;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Index;
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary> 
     /// Utility class used to extract the terms used in a query, plus any weights.
@@ -79,7 +80,7 @@ namespace Lucene.Net.Search.Highlight
         /// <returns>an array of the terms used in a query, plus their weights.</returns>
         public static WeightedTerm[] GetTerms(Query query, bool prohibited, string fieldName)
         {
-            var terms = new HashSet<WeightedTerm>();
+            var terms = new JCG.HashSet<WeightedTerm>();
             if (fieldName != null)
             {
                 fieldName = fieldName.Intern();
@@ -100,7 +101,7 @@ namespace Lucene.Net.Search.Highlight
         }
 
         //fieldname MUST be interned prior to this call
-        private static void GetTerms(Query query, HashSet<WeightedTerm> terms, bool prohibited, string fieldName)
+        private static void GetTerms(Query query, ISet<WeightedTerm> terms, bool prohibited, string fieldName)
         {
             try
             {
@@ -110,7 +111,7 @@ namespace Lucene.Net.Search.Highlight
                     GetTermsFromFilteredQuery((FilteredQuery)query, terms, prohibited, fieldName);
                 else
                 {
-                    var nonWeightedTerms = new HashSet<Term>();
+                    var nonWeightedTerms = new JCG.HashSet<Term>();
                     query.ExtractTerms(nonWeightedTerms);
                     foreach (var term in nonWeightedTerms)
                     {
@@ -139,7 +140,7 @@ namespace Lucene.Net.Search.Highlight
         /// something common which would allow access to child queries so what follows here are query-specific
         /// implementations for accessing embedded query elements. 
         /// </summary>
-        private static void GetTermsFromBooleanQuery(BooleanQuery query, HashSet<WeightedTerm> terms, bool prohibited, string fieldName)
+        private static void GetTermsFromBooleanQuery(BooleanQuery query, ISet<WeightedTerm> terms, bool prohibited, string fieldName)
         {
             var queryClauses = query.Clauses;
             for (int i = 0; i < queryClauses.Count; i++)
@@ -148,7 +149,7 @@ namespace Lucene.Net.Search.Highlight
                     GetTerms(queryClauses[i].Query, terms, prohibited, fieldName);
             }
         }
-        private static void GetTermsFromFilteredQuery(FilteredQuery query, HashSet<WeightedTerm> terms, bool prohibited, string fieldName)
+        private static void GetTermsFromFilteredQuery(FilteredQuery query, ISet<WeightedTerm> terms, bool prohibited, string fieldName)
         {
             GetTerms(query.Query, terms, prohibited, fieldName);
         }
diff --git a/src/Lucene.Net.Highlighter/Highlight/QueryTermScorer.cs b/src/Lucene.Net.Highlighter/Highlight/QueryTermScorer.cs
index 46e3241..601161d 100644
--- a/src/Lucene.Net.Highlighter/Highlight/QueryTermScorer.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/QueryTermScorer.cs
@@ -1,28 +1,28 @@
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// <see cref="IScorer"/> implementation which scores text fragments by the number of
@@ -34,7 +34,7 @@ namespace Lucene.Net.Search.Highlight
     public class QueryTermScorer : IScorer
     {
         private TextFragment currentTextFragment = null;
-        private HashSet<string> uniqueTermsInFragment;
+        private ISet<string> uniqueTermsInFragment;
 
         private float totalScore = 0;
         private float maxTermWeight = 0;
@@ -84,8 +84,7 @@ namespace Lucene.Net.Search.Highlight
             termsToFind = new Dictionary<string, WeightedTerm>();
             for (int i = 0; i < weightedTerms.Length; i++)
             {
-                WeightedTerm existingTerm;
-                if (!termsToFind.TryGetValue(weightedTerms[i].Term, out existingTerm)
+                if (!termsToFind.TryGetValue(weightedTerms[i].Term, out WeightedTerm existingTerm)
                     || (existingTerm == null)
                     || (existingTerm.Weight < weightedTerms[i].Weight))
                 {
@@ -108,7 +107,7 @@ namespace Lucene.Net.Search.Highlight
 
         public virtual void StartFragment(TextFragment newFragment)
         {
-            uniqueTermsInFragment = new HashSet<string>();
+            uniqueTermsInFragment = new JCG.HashSet<string>();
             currentTextFragment = newFragment;
             totalScore = 0;
         }
@@ -138,10 +137,7 @@ namespace Lucene.Net.Search.Highlight
         /// <summary>
         /// <seealso cref="IScorer.FragmentScore"/>
         /// </summary>
-        public virtual float FragmentScore
-        {
-            get { return totalScore; }
-        }
+        public virtual float FragmentScore => totalScore;
 
         public virtual void AllFragmentsProcessed()
         {
@@ -152,9 +148,6 @@ namespace Lucene.Net.Search.Highlight
         /// The highest weighted term (useful for passing to <see cref="GradientFormatter"/> 
         /// to set top end of coloring scale).
         /// </summary>
-        public virtual float MaxTermWeight
-        {
-            get { return maxTermWeight; }
-        }
+        public virtual float MaxTermWeight => maxTermWeight;
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
index a3dfe34..a3b63ea 100644
--- a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
@@ -1,32 +1,33 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Index;
 using Lucene.Net.Index.Memory;
 using Lucene.Net.Queries;
 using Lucene.Net.Search.Spans;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Class used to extract <see cref="WeightedSpanTerm"/>s from a <see cref="Query"/> based on whether 
@@ -244,17 +245,19 @@ namespace Lucene.Net.Search.Highlight
         /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         protected virtual void ExtractWeightedSpanTerms(IDictionary<string, WeightedSpanTerm> terms, SpanQuery spanQuery)
         {
-            HashSet<string> fieldNames;
+            ISet<string> fieldNames;
 
             if (fieldName == null)
             {
-                fieldNames = new HashSet<string>();
+                fieldNames = new JCG.HashSet<string>();
                 CollectSpanQueryFields(spanQuery, fieldNames);
             }
             else
             {
-                fieldNames = new HashSet<string>();
-                fieldNames.Add(fieldName);
+                fieldNames = new JCG.HashSet<string>
+                {
+                    fieldName
+                };
             }
             // To support the use of the default field name
             if (defaultField != null)
@@ -262,9 +265,9 @@ namespace Lucene.Net.Search.Highlight
                 fieldNames.Add(defaultField);
             }
 
-            IDictionary<string, SpanQuery> queries = new HashMap<string, SpanQuery>();
+            IDictionary<string, SpanQuery> queries = new JCG.Dictionary<string, SpanQuery>();
 
-            var nonWeightedTerms = new HashSet<Term>();
+            var nonWeightedTerms = new JCG.HashSet<Term>();
             bool mustRewriteQuery = MustRewriteQuery(spanQuery);
             if (mustRewriteQuery)
             {
@@ -288,8 +291,8 @@ namespace Lucene.Net.Search.Highlight
                 q = mustRewriteQuery ? queries[field] : spanQuery;
 
                 AtomicReaderContext context = GetLeafContext();
-                var termContexts = new HashMap<Term, TermContext>();
-                TreeSet<Term> extractedTerms = new TreeSet<Term>();
+                var termContexts = new JCG.Dictionary<Term, TermContext>();
+                ISet<Term> extractedTerms = new JCG.SortedSet<Term>();
                 q.ExtractTerms(extractedTerms);
                 foreach (Term term in extractedTerms)
                 {
@@ -343,7 +346,7 @@ namespace Lucene.Net.Search.Highlight
         /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         protected virtual void ExtractWeightedTerms(IDictionary<string, WeightedSpanTerm> terms, Query query)
         {
-            var nonWeightedTerms = new HashSet<Term>();
+            var nonWeightedTerms = new JCG.HashSet<Term>();
             query.ExtractTerms(nonWeightedTerms);
 
             foreach (Term queryTerm in nonWeightedTerms)
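
    In this file the Support HashMap/TreeSet types give way to JCG.Dictionary and JCG.SortedSet.
    A small sketch of the SortedSet swap, using strings instead of Term purely for brevity (names
    below are illustrative, not from the patch):

        using System;
        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class SortedSetSketch
        {
            internal static void Demo()
            {
                // The patch fills a JCG.SortedSet<Term> through the ISet<T> interface;
                // enumeration then comes back in sorted order, like Java's TreeSet.
                ISet<string> extracted = new JCG.SortedSet<string>();
                extracted.Add("title:b");
                extracted.Add("body:a");
                extracted.Add("title:a");

                foreach (string term in extracted)
                    Console.WriteLine(term); // body:a, title:a, title:b
            }
        }
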
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
index ea7eed9..680dfb9 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
@@ -1,9 +1,9 @@
 #if FEATURE_BREAKITERATOR
+using J2N;
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
 using Lucene.Net.Search.Spans;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
 using System;
@@ -13,21 +13,21 @@ using System.Diagnostics;
 namespace Lucene.Net.Search.PostingsHighlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Support for highlighting multiterm queries in PostingsHighlighter.
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
index b94550c..0224926 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
@@ -1,4 +1,5 @@
 #if FEATURE_BREAKITERATOR
+using J2N.Text;
 using ICU4N.Text;
 using Lucene.Net.Analysis;
 using Lucene.Net.Index;
@@ -12,25 +13,26 @@ using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.PostingsHighlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Simple highlighter that does not analyze fields nor use
@@ -389,7 +391,7 @@ namespace Lucene.Net.Search.PostingsHighlight
             }
             IndexReader reader = searcher.IndexReader;
             Query rewritten = Rewrite(query);
-            TreeSet<Term> queryTerms = new TreeSet<Term>();
+            JCG.SortedSet<Term> queryTerms = new JCG.SortedSet<Term>();
             rewritten.ExtractTerms(queryTerms);
 
             IndexReaderContext readerContext = reader.Context;
@@ -417,10 +419,10 @@ namespace Lucene.Net.Search.PostingsHighlight
                 int numPassages = maxPassages[i];
                 Term floor = new Term(field, "");
                 Term ceiling = new Term(field, UnicodeUtil.BIG_TERM);
-                // LUCENENET NOTE: System.Collections.Generic.SortedSet<T>.GetViewBetween ceiling is inclusive.
-                // However, in Java, subSet ceiling is exclusive. Also,
-                // SortedSet<T> doesn't seem to have the correct logic, but C5.TreeSet<T> does.
-                var fieldTerms = queryTerms.RangeFromTo(floor, ceiling); //SubSet(floor, ceiling);
+
+                // LUCENENET: Call custom GetViewBetween overload to mimic Java's exclusive upper bound behavior.
+                var fieldTerms = queryTerms.GetViewBetween(floor, lowerValueInclusive: true, ceiling, upperValueInclusive: false);
+
                 // TODO: should we have some reasonable defaults for term pruning? (e.g. stopwords)
 
                 // Strip off the redundant field:
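
    The LUCENENET comment above is the crux of this hunk: Java's TreeSet.subSet(floor, ceiling)
    excludes the ceiling, while the BCL SortedSet<T>.GetViewBetween is inclusive on both ends.
    A hedged sketch of the J2N overload the patch calls, shown with ints rather than Term:

        using System;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class ViewBetweenSketch
        {
            internal static void Demo()
            {
                var terms = new JCG.SortedSet<int> { 1, 5, 7, 10, 12 };

                // Inclusive floor, exclusive ceiling - the Java subSet(5, 10) behavior.
                var view = terms.GetViewBetween(5, lowerValueInclusive: true,
                                                10, upperValueInclusive: false);

                foreach (int t in view)
                    Console.WriteLine(t); // 5, 7 - the ceiling (10) is excluded
            }
        }
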
@@ -621,7 +623,7 @@ namespace Lucene.Net.Search.PostingsHighlight
             {
                 throw new NullReferenceException("PassageScorer cannot be null");
             }
-            Support.PriorityQueue<OffsetsEnum> pq = new Support.PriorityQueue<OffsetsEnum>();
+            JCG.PriorityQueue<OffsetsEnum> pq = new JCG.PriorityQueue<OffsetsEnum>();
             float[] weights = new float[terms.Length];
             // initialize postings
             for (int i = 0; i < terms.Length; i++)
@@ -666,11 +668,10 @@ namespace Lucene.Net.Search.PostingsHighlight
 
             pq.Add(new OffsetsEnum(EMPTY, int.MaxValue)); // a sentinel for termination
 
-            Support.PriorityQueue<Passage> passageQueue = new Support.PriorityQueue<Passage>(n, new HighlightDocComparerAnonymousHelper1());
+            JCG.PriorityQueue<Passage> passageQueue = new JCG.PriorityQueue<Passage>(n, new HighlightDocComparerAnonymousHelper1());
             Passage current = new Passage();
 
-            OffsetsEnum off;
-            while ((off = pq.Poll()) != null)
+            while (pq.TryDequeue(out OffsetsEnum off))
             {
                 DocsAndPositionsEnum dp = off.dp;
                 int start = dp.StartOffset;
@@ -700,10 +701,10 @@ namespace Lucene.Net.Search.PostingsHighlight
                         }
                         else
                         {
-                            passageQueue.Offer(current);
+                            passageQueue.Enqueue(current);
                             if (passageQueue.Count > n)
                             {
-                                current = passageQueue.Poll();
+                                current = passageQueue.Dequeue();
                                 current.Reset();
                             }
                             else
@@ -754,7 +755,7 @@ namespace Lucene.Net.Search.PostingsHighlight
                     }
                     if (start >= current.endOffset || end > contentLength)
                     {
-                        pq.Offer(off);
+                        pq.Enqueue(off);
                         break;
                     }
                 }
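
    This file also moves from the Support PriorityQueue's Java-style Offer/Poll to J2N's
    Enqueue/Dequeue/TryDequeue surface. A minimal sketch of the drain loop the patch uses,
    assuming (as the highlighter code above does) that elements come out in comparer order,
    smallest first:

        using System;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class PriorityQueueSketch
        {
            internal static void Demo()
            {
                var pq = new JCG.PriorityQueue<int>();
                pq.Enqueue(30);   // replaces Offer()
                pq.Enqueue(10);
                pq.Enqueue(20);

                // Replaces the "while ((x = pq.Poll()) != null)" idiom; TryDequeue
                // returns false once the queue is empty instead of yielding null.
                while (pq.TryDequeue(out int next))
                    Console.WriteLine(next); // 10, 20, 30 (assuming Java-style min ordering)
            }
        }
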
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
index e56f389..51b38ad 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
@@ -14,21 +14,21 @@ using WeightedFragInfo = Lucene.Net.Search.VectorHighlight.FieldFragList.Weighte
 namespace Lucene.Net.Search.VectorHighlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Base <see cref="IFragmentsBuilder"/> implementation that supports colored pre/post
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs
index 5d7f44b..917c94e 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs
@@ -7,21 +7,21 @@ using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.Wei
 namespace Lucene.Net.Search.VectorHighlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// FieldFragList has a list of "frag info" that is used by <see cref="IFragmentsBuilder"/> class
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
index 429b674..4566a1a 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
@@ -9,21 +9,21 @@ using TermInfo = Lucene.Net.Search.VectorHighlight.FieldTermStack.TermInfo;
 namespace Lucene.Net.Search.VectorHighlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// FieldPhraseList has a list of WeightedPhraseInfo that is used by FragListBuilder
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs
index 8c2676b..0c9aa7b 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs
@@ -1,8 +1,8 @@
 using Lucene.Net.Index;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 using TermInfo = Lucene.Net.Search.VectorHighlight.FieldTermStack.TermInfo;
 
 namespace Lucene.Net.Search.VectorHighlight
@@ -34,11 +34,11 @@ namespace Lucene.Net.Search.VectorHighlight
 
         // fieldMatch==true,  Map<fieldName,QueryPhraseMap>
         // fieldMatch==false, Map<null,QueryPhraseMap>
-        internal IDictionary<string, QueryPhraseMap> rootMaps = new HashMap<string, QueryPhraseMap>();
+        internal IDictionary<string, QueryPhraseMap> rootMaps = new JCG.Dictionary<string, QueryPhraseMap>();
 
         // fieldMatch==true,  Map<fieldName,setOfTermsInQueries>
         // fieldMatch==false, Map<null,setOfTermsInQueries>
-        internal IDictionary<string, ISet<string>> termSetMap = new HashMap<string, ISet<string>>();
+        internal IDictionary<string, ISet<string>> termSetMap = new JCG.Dictionary<string, ISet<string>>();
 
         internal int termOrPhraseNumber; // used for colored tag support
 
@@ -48,8 +48,8 @@ namespace Lucene.Net.Search.VectorHighlight
         internal FieldQuery(Query query, IndexReader reader, bool phraseHighlight, bool fieldMatch)
         {
             this.fieldMatch = fieldMatch;
-            // LUCENENET NOTE: LinkedHashSet cares about insertion order - in .NET, we can just use List<T> for that
-            List<Query> flatQueries = new List<Query>();
+            // LUCENENET NOTE: LinkedHashSet cares about insertion order
+            ISet<Query> flatQueries = new JCG.LinkedHashSet<Query>();
             Flatten(query, reader, flatQueries);
             SaveTerms(flatQueries, reader);
             ICollection<Query> expandQueries = Expand(flatQueries);
@@ -119,8 +119,7 @@ namespace Lucene.Net.Search.VectorHighlight
                     {
                         Query flat = new TermQuery(pq.GetTerms()[0]);
                         flat.Boost = pq.Boost;
-                        if (!flatQueries.Contains(flat)) // LUCENENET specific - set semantics, but this is a list
-                            flatQueries.Add(flat);
+                        flatQueries.Add(flat);
                     }
                 }
             }
@@ -192,8 +191,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// <returns></returns>
         internal ICollection<Query> Expand(ICollection<Query> flatQueries)
         {
-            // LUCENENET NOTE: LinkedHashSet cares about insertion order - in .NET, we can just use List<T> for that
-            List<Query> expandQueries = new List<Query>();
+            ISet<Query> expandQueries = new JCG.LinkedHashSet<Query>();
 
             for (int i = 0; i < flatQueries.Count; )
             {
@@ -203,8 +201,7 @@ namespace Lucene.Net.Search.VectorHighlight
                 {
                     i++;
                 }
-                if (!expandQueries.Contains(query)) // LUCENENET specific - set semantics, but this is a list
-                    expandQueries.Add(query);
+                expandQueries.Add(query);
                 if (!(query is PhraseQuery)) continue;
                 using (IEnumerator<Query> j = flatQueries.GetEnumerator())
                 {
@@ -385,10 +382,9 @@ namespace Lucene.Net.Search.VectorHighlight
         private ISet<string> GetTermSet(Query query)
         {
             string key = GetKey(query);
-            ISet<string> set;
-            if (!termSetMap.TryGetValue(key, out set) || set == null)
+            if (!termSetMap.TryGetValue(key, out ISet<string> set) || set == null)
             {
-                set = new HashSet<string>();
+                set = new JCG.HashSet<string>();
                 termSetMap[key] = set;
             }
             return set;
@@ -396,8 +392,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
         internal ISet<string> GetTermSet(string field)
         {
-            ISet<string> result;
-            termSetMap.TryGetValue(fieldMatch ? field : null, out result);
+            termSetMap.TryGetValue(fieldMatch ? field : null, out ISet<string> result);
             return result;
         }
 
@@ -406,8 +401,7 @@ namespace Lucene.Net.Search.VectorHighlight
         {
             QueryPhraseMap rootMap = GetRootMap(fieldName);
             if (rootMap == null) return null;
-            QueryPhraseMap result;
-            rootMap.subMap.TryGetValue(term, out result);
+            rootMap.subMap.TryGetValue(term, out QueryPhraseMap result);
             return result;
         }
 
@@ -421,8 +415,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
         private QueryPhraseMap GetRootMap(string fieldName)
         {
-            QueryPhraseMap result;
-            rootMaps.TryGetValue(fieldMatch ? fieldName : null, out result);
+            rootMaps.TryGetValue(fieldMatch ? fieldName : null, out QueryPhraseMap result);
             return result;
         }
 
@@ -457,8 +450,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
             private QueryPhraseMap GetOrNewMap(IDictionary<string, QueryPhraseMap> subMap, string term)
             {
-                QueryPhraseMap map;
-                if (!subMap.TryGetValue(term, out map) || map == null)
+                if (!subMap.TryGetValue(term, out QueryPhraseMap map) || map == null)
                 {
                     map = new QueryPhraseMap(fieldQuery);
                     subMap[term] = map;
@@ -509,25 +501,13 @@ namespace Lucene.Net.Search.VectorHighlight
                 this.termOrPhraseNumber = fieldQuery.NextTermOrPhraseNumber();
             }
 
-            public virtual bool IsTerminal
-            {
-                get { return terminal; }
-            }
+            public virtual bool IsTerminal => terminal;
 
-            public virtual int Slop
-            {
-                get { return slop; }
-            }
+            public virtual int Slop => slop;
 
-            public virtual float Boost
-            {
-                get { return boost; }
-            }
+            public virtual float Boost => boost;
 
-            public virtual int TermOrPhraseNumber
-            {
-                get { return termOrPhraseNumber; }
-            }
+            public virtual int TermOrPhraseNumber => termOrPhraseNumber;
 
             public virtual QueryPhraseMap SearchPhrase(IList<TermInfo> phraseCandidate)
             {
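
    The two LUCENENET notes in this file are the interesting part: the earlier port used List<Query>
    plus explicit Contains checks to fake LinkedHashSet, and the patch swaps in J2N's real
    LinkedHashSet, which de-duplicates like a set but enumerates in insertion order (and, as the
    TryGetValue(null) calls above assume, JCG.Dictionary also tolerates null keys, unlike the BCL
    Dictionary). A small sketch of the set behavior, with illustrative names:

        using System;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class LinkedHashSetSketch
        {
            internal static void Demo()
            {
                var flatQueries = new JCG.LinkedHashSet<string>();
                flatQueries.Add("b");
                flatQueries.Add("a");
                flatQueries.Add("b"); // duplicate, ignored - no Contains() check needed

                foreach (string q in flatQueries)
                    Console.WriteLine(q); // b, a - insertion order is preserved
            }
        }
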
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/SimpleBoundaryScanner.cs b/src/Lucene.Net.Highlighter/VectorHighlight/SimpleBoundaryScanner.cs
index e16faa1..527284b 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/SimpleBoundaryScanner.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/SimpleBoundaryScanner.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Support;
-using System.Collections.Generic;
+using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.VectorHighlight
 {
@@ -51,8 +51,8 @@ namespace Lucene.Net.Search.VectorHighlight
         public SimpleBoundaryScanner(int maxScan, char[] boundaryChars)
         {
             this.m_maxScan = maxScan;
-            this.m_boundaryChars = new HashSet<char>();
-            this.m_boundaryChars.UnionWith(Arrays.AsList(boundaryChars));
+            this.m_boundaryChars = new JCG.HashSet<char>();
+            this.m_boundaryChars.UnionWith(boundaryChars);
         }
 
         public SimpleBoundaryScanner(int maxScan, ISet<char> boundaryChars)
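
    Because JCG.HashSet<char> is an ISet<char>, UnionWith accepts any IEnumerable<char>, so the
    Arrays.AsList bridge from Lucene.Net.Support is no longer needed. A trivial sketch of that
    constructor body, under illustrative names:

        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class BoundaryCharsSketch
        {
            internal static ISet<char> Build(char[] boundaryChars)
            {
                var set = new JCG.HashSet<char>();
                set.UnionWith(boundaryChars); // char[] already implements IEnumerable<char>
                return set;
            }
        }
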
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs
index 4ab8e79..3c262f8 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using SubInfo = Lucene.Net.Search.VectorHighlight.FieldFragList.WeightedFragInfo.SubInfo;
 using TermInfo = Lucene.Net.Search.VectorHighlight.FieldTermStack.TermInfo;
 using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo;
@@ -7,21 +8,21 @@ using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.Wei
 namespace Lucene.Net.Search.VectorHighlight
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// A weighted implementation of <see cref="FieldFragList"/>.
@@ -42,9 +43,9 @@ namespace Lucene.Net.Search.VectorHighlight
         /// </summary>
         public override void Add(int startOffset, int endOffset, IList<WeightedPhraseInfo> phraseInfoList)
         {
-            List<SubInfo> tempSubInfos = new List<SubInfo>();
-            List<SubInfo> realSubInfos = new List<SubInfo>();
-            HashSet<string> distinctTerms = new HashSet<string>();
+            IList<SubInfo> tempSubInfos = new List<SubInfo>();
+            IList<SubInfo> realSubInfos = new List<SubInfo>();
+            ISet<string> distinctTerms = new JCG.HashSet<string>();
             int length = 0;
 
             foreach (WeightedPhraseInfo phraseInfo in phraseInfoList)
@@ -57,7 +58,7 @@ namespace Lucene.Net.Search.VectorHighlight
                     length++;
                 }
                 tempSubInfos.Add(new SubInfo(phraseInfo.GetText(), phraseInfo.TermsOffsets,
-                  phraseInfo.Seqnum, phraseTotalBoost));
+                    phraseInfo.Seqnum, phraseTotalBoost));
             }
 
             // We want the terms per fragment (length) to be included in the weight. Otherwise a one-word-query
diff --git a/src/Lucene.Net.Join/TermsWithScoreCollector.cs b/src/Lucene.Net.Join/TermsWithScoreCollector.cs
index d2581f9..6fe8b5a 100644
--- a/src/Lucene.Net.Join/TermsWithScoreCollector.cs
+++ b/src/Lucene.Net.Join/TermsWithScoreCollector.cs
@@ -8,21 +8,21 @@ using System.Diagnostics.CodeAnalysis;
 namespace Lucene.Net.Join
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     internal abstract class TermsWithScoreCollector : ICollector
     {
diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
index 08cadf7..26452bf 100644
--- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
@@ -9,21 +9,21 @@ using System.Diagnostics;
 namespace Lucene.Net.Join
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Just like <see cref="ToParentBlockJoinQuery"/>, except this
@@ -180,7 +180,7 @@ namespace Lucene.Net.Join
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return Collections.Singleton(new ChildScorer(_parentScorer, "BLOCK_JOIN"));
+                return new List<ChildScorer> { new ChildScorer(_parentScorer, "BLOCK_JOIN") };
             }
             
             public override int NextDoc()
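
    GetChildren previously went through the Support Collections.Singleton helper; the patch now
    returns a collection initializer instead. A sketch of the shape (note the List is mutable,
    unlike Java's Collections.singleton; that the callers do not rely on immutability is an
    assumption on my part):

        using System.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class SingletonSketch
        {
            // Returns a one-element collection without a dedicated singleton type.
            internal static ICollection<string> Children(string child)
            {
                return new List<string> { child };
            }
        }
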
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
index 184a368..1ae7c76 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
@@ -12,21 +12,21 @@ using System.IO;
 namespace Lucene.Net.Join
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Collects parent document hits for a <see cref="Query"/> containing one or more

diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
index 518c9d9..f5596e1 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
@@ -9,21 +9,21 @@ using System.Diagnostics;
 namespace Lucene.Net.Join
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// This query requires that you index
@@ -226,7 +226,7 @@ namespace Lucene.Net.Join
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return Collections.Singleton(new ChildScorer(_childScorer, "BLOCK_JOIN"));
+                return new List<ChildScorer> { new ChildScorer(_childScorer, "BLOCK_JOIN") };
             }
 
             internal virtual int ChildCount
diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs
index 5b94e37..d50a37a 100644
--- a/src/Lucene.Net.Misc/Document/LazyDocument.cs
+++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs
@@ -5,25 +5,26 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Documents
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Defers actually loading a field's value until you ask
@@ -39,7 +40,7 @@ namespace Lucene.Net.Documents
         private Document doc;
 
         private IDictionary<int?, IList<LazyField>> fields = new Dictionary<int?, IList<LazyField>>();
-        private HashSet<string> fieldNames = new HashSet<string>();
+        private ISet<string> fieldNames = new JCG.HashSet<string>();
 
         public LazyDocument(IndexReader reader, int docID)
         {
diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
index f14f71d..1fddf02 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
@@ -9,21 +9,21 @@ using System.Diagnostics;
 namespace Lucene.Net.Index.Sorter
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// An <see cref="AtomicReader"/> which supports sorting documents by a given
diff --git a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
index 85f40f2..d1387fc 100644
--- a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
+++ b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Index;
+using J2N.Text;
+using Lucene.Net.Index;
 using Lucene.Net.Store;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
index cffd774..61199ee 100644
--- a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
+++ b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
@@ -4,6 +4,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.Runtime.CompilerServices;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Util.Fst
 {
@@ -94,7 +95,7 @@ namespace Lucene.Net.Util.Fst
             else
             {
                 IList outputList = (IList)output;
-                IList<T> addedList = new List<T>(outputList.Count);
+                IList<T> addedList = new JCG.List<T>(outputList.Count);
                 foreach (object _output in outputList)
                 {
                     addedList.Add(outputs.Add((T)prefix, (T)_output));
@@ -141,7 +142,7 @@ namespace Lucene.Net.Util.Fst
             }
             else
             {
-                IList<T> outputList = new List<T>(count);
+                IList<T> outputList = new JCG.List<T>(count);
                 for (int i = 0; i < count; i++)
                 {
                     outputList.Add(outputs.Read(@in));
@@ -187,27 +188,27 @@ namespace Lucene.Net.Util.Fst
         [MethodImpl(MethodImplOptions.NoInlining)]
         public override object Merge(object first, object second)
         {
-            List<T> outputList = new List<T>();
-            if (!(first is IList))
+            IList<T> outputList = new JCG.List<T>();
+            if (!(first is IList<T> firstList))
             {
                 outputList.Add((T)first);
             }
             else
             {
-                foreach (object value in first as IList)
+                foreach (T value in firstList)
                 {
-                    outputList.Add((T)value);
+                    outputList.Add(value);
                 }
             }
-            if (!(second is IList))
+            if (!(second is IList<T> secondList))
             {
                 outputList.Add((T)second);
             }
             else
             {
-                foreach (object value in second as IList)
+                foreach (T value in secondList)
                 {
-                    outputList.Add((T)value);
+                    outputList.Add(value);
                 }
             }
             //System.out.println("MERGE: now " + outputList.size() + " first=" + outputToString(first) + " second=" + outputToString(second));
@@ -222,15 +223,13 @@ namespace Lucene.Net.Util.Fst
 
         public IList<T> AsList(object output)
         {
-            if (!(output is IList))
+            if (!(output is IList<T> outputList))
             {
-                IList<T> result = new List<T>(1);
-                result.Add((T)output);
-                return result;
+                return new JCG.List<T>(1) { (T)output };
             }
             else
             {
-                return (IList<T>)output;
+                return outputList;
             }
         }
     }
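
    The Merge/AsList rewrites above lean on C# 7 pattern matching: "output is IList<T> list"
    performs the type test and the cast in one step, and the code moves from the non-generic IList
    to IList<T> in the process. A reduced sketch of the same shape:

        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class OutputsSketch<T>
        {
            // Mirrors the AsList() change: wrap a single value, or pass an existing list through.
            internal static IList<T> AsList(object output)
            {
                if (output is IList<T> outputList)
                    return outputList;                    // already a list - no cast needed
                return new JCG.List<T>(1) { (T)output };  // single value - wrap it
            }
        }
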
diff --git a/src/Lucene.Net.Queries/BooleanFilter.cs b/src/Lucene.Net.Queries/BooleanFilter.cs
index da24d16..4b48a36 100644
--- a/src/Lucene.Net.Queries/BooleanFilter.cs
+++ b/src/Lucene.Net.Queries/BooleanFilter.cs
@@ -1,11 +1,11 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Collections;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries
 {
@@ -37,7 +37,7 @@ namespace Lucene.Net.Queries
     /// </summary>
     public class BooleanFilter : Filter, IEnumerable<FilterClause>
     {
-        private readonly IList<FilterClause> clauses = new EquatableList<FilterClause>();
+        private readonly IList<FilterClause> clauses = new JCG.List<FilterClause>();
 
         /// <summary>
         /// Returns the a <see cref="DocIdSetIterator"/> representing the Boolean composition
diff --git a/src/Lucene.Net.Queries/CommonTermsQuery.cs b/src/Lucene.Net.Queries/CommonTermsQuery.cs
index 788e7e2..60e8ac7 100644
--- a/src/Lucene.Net.Queries/CommonTermsQuery.cs
+++ b/src/Lucene.Net.Queries/CommonTermsQuery.cs
@@ -1,6 +1,5 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections;
@@ -9,6 +8,7 @@ using System.Diagnostics;
 using System.Globalization;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries
 {
@@ -306,13 +306,7 @@ namespace Lucene.Net.Queries
         /// for the high and low frequency query instance. The top level query will
         /// always disable coords.
         /// </summary>
-        public virtual bool IsCoordDisabled
-        {
-            get
-            {
-                return m_disableCoord;
-            }
-        }
+        public virtual bool IsCoordDisabled => m_disableCoord;
 
         /// <summary>
         /// Gets or Sets a minimum number of the low frequent optional BooleanClauses which must be
@@ -408,9 +402,8 @@ namespace Lucene.Net.Queries
             result = prime * result + J2N.BitConversion.SingleToInt32Bits(m_maxTermFrequency);
             result = prime * result + J2N.BitConversion.SingleToInt32Bits(m_lowFreqMinNrShouldMatch);
             result = prime * result + J2N.BitConversion.SingleToInt32Bits(m_highFreqMinNrShouldMatch);
-            // LUCENENET specific: wrap the m_terms to ensure the collection values are
-            // compared for equalitly
-            result = prime * result + ((m_terms == null) ? 0 : Equatable.Wrap(m_terms).GetHashCode());
+            // LUCENENET specific: use structural equality comparison
+            result = prime * result + ((m_terms == null) ? 0 : JCG.ListEqualityComparer<Term>.Default.GetHashCode(m_terms));
             return result;
         }
 
@@ -468,9 +461,8 @@ namespace Lucene.Net.Queries
                     return false;
                 }
             }
-            // LUCENENET specific: wrap the m_terms to ensure the collection values are
-            // compared for equalitly
-            else if (!Equatable.Wrap(m_terms).Equals(other.m_terms))
+            // LUCENENET specific: use structural equality comparison
+            else if (!JCG.ListEqualityComparer<Term>.Default.Equals(m_terms, other.m_terms))
             {
                 return false;
             }
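
    Equatable.Wrap is gone; structural equality now comes from J2N's ListEqualityComparer, which
    compares element by element and hashes the contents rather than the reference. A small sketch
    with strings (the patch uses Term; that the comparer accepts any IList<T> is inferred from the
    call above):

        using System;
        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        // Illustrative sketch, not part of the patch.
        internal static class ListEqualitySketch
        {
            internal static void Demo()
            {
                IList<string> a = new List<string> { "foo", "bar" };
                IList<string> b = new List<string> { "foo", "bar" };

                var cmp = JCG.ListEqualityComparer<string>.Default;
                Console.WriteLine(cmp.Equals(a, b));                          // True - same elements, same order
                Console.WriteLine(cmp.GetHashCode(a) == cmp.GetHashCode(b));  // True - hash follows the contents
            }
        }
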
diff --git a/src/Lucene.Net.Queries/CustomScoreQuery.cs b/src/Lucene.Net.Queries/CustomScoreQuery.cs
index a7d5a46..b5a2c6c 100644
--- a/src/Lucene.Net.Queries/CustomScoreQuery.cs
+++ b/src/Lucene.Net.Queries/CustomScoreQuery.cs
@@ -384,7 +384,7 @@ namespace Lucene.Net.Queries
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return Collections.Singleton(new ChildScorer(subQueryScorer, "CUSTOM"));
+                return new List<ChildScorer> { new ChildScorer(subQueryScorer, "CUSTOM") };
             }
 
             public override int Advance(int target)
diff --git a/src/Lucene.Net.Queries/Function/BoostedQuery.cs b/src/Lucene.Net.Queries/Function/BoostedQuery.cs
index d8f6246..1e39cfd 100644
--- a/src/Lucene.Net.Queries/Function/BoostedQuery.cs
+++ b/src/Lucene.Net.Queries/Function/BoostedQuery.cs
@@ -195,7 +195,7 @@ namespace Lucene.Net.Queries.Function
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return Collections.Singleton(new ChildScorer(scorer, "CUSTOM"));
+                return new List<ChildScorer> { new ChildScorer(scorer, "CUSTOM") };
             }
 
             public Explanation Explain(int doc)
diff --git a/src/Lucene.Net.Queries/Function/ValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSource.cs
index 60d10a8..14ac19f 100644
--- a/src/Lucene.Net.Queries/Function/ValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSource.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Index;
+using J2N.Runtime.CompilerServices;
+using Lucene.Net.Index;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using System;
 using System.Collections;
 
@@ -22,7 +22,7 @@ namespace Lucene.Net.Queries.Function
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
-    
+
     /// <summary>
     /// Instantiates <see cref="FunctionValues"/> for a particular reader.
     /// <para/>
@@ -65,9 +65,10 @@ namespace Lucene.Net.Queries.Function
         /// </summary>
         public static IDictionary NewContext(IndexSearcher searcher)
         {
-            var context = new Hashtable(IdentityComparer.Default);
-            context["searcher"] = searcher;
-            return context;
+            return new Hashtable(IdentityEqualityComparer<object>.Default)
+            {
+                ["searcher"] = searcher
+            };
         }
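
A small sketch of what the identity comparer above buys: keys in the context table are matched by reference, so value-equal but distinct objects remain distinct keys (the string keys here are illustrative only):

    using System;
    using System.Collections;
    using J2N.Runtime.CompilerServices;

    var table = new Hashtable(IdentityEqualityComparer<object>.Default);
    string k1 = new string('a', 3);
    string k2 = new string('a', 3); // equal value, different reference
    table[k1] = 1;
    table[k2] = 2;
    Console.WriteLine(table.Count); // 2 under identity comparison; 1 under default string equality
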
 
 
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
index d8a4d65..140e458 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
@@ -1,13 +1,12 @@
 using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Mutable;
-using System;
 using System.Collections;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Function.ValueSources
 {
@@ -304,13 +303,13 @@ namespace Lucene.Net.Queries.Function.ValueSources
 
             EnumFieldSource that = (EnumFieldSource)o;
 
-            // LUCENENET specific: must use Collections.Equals() to ensure values
+            // LUCENENET specific: must use DictionaryEqualityComparer.Equals() to ensure values
             // contained within the dictionaries are compared for equality
-            if (!Collections.Equals(enumIntToStringMap, that.enumIntToStringMap))
+            if (!JCG.DictionaryEqualityComparer<int?, string>.Default.Equals(enumIntToStringMap, that.enumIntToStringMap))
             {
                 return false;
             }
-            if (!Collections.Equals(enumStringToIntMap, that.enumStringToIntMap))
+            if (!JCG.DictionaryEqualityComparer<string, int?>.Default.Equals(enumStringToIntMap, that.enumStringToIntMap))
             {
                 return false;
             }
@@ -326,10 +325,10 @@ namespace Lucene.Net.Queries.Function.ValueSources
         {
             int result = base.GetHashCode();
             result = 31 * result + parser.GetHashCode();
-            // LUCENENET specific: must use Collections.GetHashCode() to ensure values
+            // LUCENENET specific: must use DictionaryEqualityComparer.GetHashCode() to ensure values
             // contained within the dictionaries are compared for equality
-            result = 31 * result + Collections.GetHashCode(enumIntToStringMap);
-            result = 31 * result + Collections.GetHashCode(enumStringToIntMap);
+            result = 31 * result + JCG.DictionaryEqualityComparer<int?, string>.Default.GetHashCode(enumIntToStringMap);
+            result = 31 * result + JCG.DictionaryEqualityComparer<string, int?>.Default.GetHashCode(enumStringToIntMap);
             return result;
         }
     }
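
For reference, a minimal sketch of the dictionary comparer usage above, with plain int/string maps standing in for the enum maps; the assumption (consistent with the Java semantics being ported) is that comparison is by key/value pairs, independent of insertion order:

    using System;
    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    var a = new Dictionary<int, string> { [1] = "one", [2] = "two" };
    var b = new Dictionary<int, string> { [2] = "two", [1] = "one" };

    bool equal = JCG.DictionaryEqualityComparer<int, string>.Default.Equals(a, b);
    int hash   = JCG.DictionaryEqualityComparer<int, string>.Default.GetHashCode(a);
    Console.WriteLine(equal); // true: same entries, order ignored (assumed)
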
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs
index 48bf116..b02c6d2 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs
@@ -1,10 +1,10 @@
 using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using System.Collections;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Function.ValueSources
 {
@@ -115,9 +115,8 @@ namespace Lucene.Net.Queries.Function.ValueSources
 
         public override int GetHashCode()
         {
-            // LUCENENET specific: ensure our passed in list is equatable by
-            // wrapping it in an EquatableList if it is not one already
-            return Equatable.Wrap(m_sources).GetHashCode() + Name.GetHashCode();
+            // LUCENENET specific: use structural equality comparison
+            return JCG.ListEqualityComparer<ValueSource>.Default.GetHashCode(m_sources) + Name.GetHashCode();
         }
 
         public override bool Equals(object o)
@@ -130,9 +129,8 @@ namespace Lucene.Net.Queries.Function.ValueSources
             if (other == null)
                 return false;
 
-            // LUCENENET specific: ensure our passed in list is equatable by
-            // wrapping it in an EquatableList if it is not one already
-            return Equatable.Wrap(this.m_sources).Equals(other.m_sources);
+            // LUCENENET specific: use structural equality comparison
+            return JCG.ListEqualityComparer<ValueSource>.Default.Equals(this.m_sources, other.m_sources);
         }
 
         public override void CreateWeight(IDictionary context, IndexSearcher searcher)
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/MultiFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/MultiFunction.cs
index 55cd79c..e78e041 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/MultiFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/MultiFunction.cs
@@ -1,9 +1,9 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using System.Collections;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Function.ValueSources
 {
@@ -126,9 +126,8 @@ namespace Lucene.Net.Queries.Function.ValueSources
 
         public override int GetHashCode()
         {
-            // LUCENENET specific: ensure our passed in list is equatable by
-            // wrapping it in an EquatableList if it is not one already
-            return Equatable.Wrap(m_sources).GetHashCode() + Name.GetHashCode();
+            // LUCENENET specific: use structural equality comparison
+            return JCG.ListEqualityComparer<ValueSource>.Default.GetHashCode(m_sources) + Name.GetHashCode();
         }
 
         public override bool Equals(object o)
@@ -139,9 +138,8 @@ namespace Lucene.Net.Queries.Function.ValueSources
             }
             var other = (MultiFunction)o;
 
-            // LUCENENET specific: ensure our passed in list is equatable by
-            // wrapping it in an EquatableList if it is not one already
-            return Equatable.Wrap(this.m_sources).Equals(other.m_sources);
+            // LUCENENET specific: use structural equality comparison
+            return JCG.ListEqualityComparer<ValueSource>.Default.Equals(this.m_sources, other.m_sources);
         }
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs
index 7cb5c58..11835fb 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs
@@ -1,9 +1,9 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using System.Collections;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Function.ValueSources
 {
@@ -292,16 +292,14 @@ namespace Lucene.Net.Queries.Function.ValueSources
 
             var that = (VectorValueSource)o;
 
-            // LUCENENET specific: ensure our passed in list is equatable by
-            // wrapping it in an EquatableList if it is not one already
-            return Equatable.Wrap(m_sources).Equals(that.m_sources);
+            // LUCENENET specific: use structural equality comparison
+            return JCG.ListEqualityComparer<ValueSource>.Default.Equals(m_sources, that.m_sources);
         }
 
         public override int GetHashCode()
         {
-            // LUCENENET specific: ensure our passed in list is equatable by
-            // wrapping it in an EquatableList if it is not one already
-            return Equatable.Wrap(m_sources).GetHashCode();
+            // LUCENENET specific: use structural equality comparison
+            return JCG.ListEqualityComparer<ValueSource>.Default.GetHashCode(m_sources);
         }
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Queries/Mlt/MoreLikeThisQuery.cs b/src/Lucene.Net.Queries/Mlt/MoreLikeThisQuery.cs
index 58fae99..672a8be 100644
--- a/src/Lucene.Net.Queries/Mlt/MoreLikeThisQuery.cs
+++ b/src/Lucene.Net.Queries/Mlt/MoreLikeThisQuery.cs
@@ -9,6 +9,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Mlt
 {
@@ -147,8 +148,8 @@ namespace Lucene.Net.Queries.Mlt
             result = prime * result + minTermFrequency;
             result = prime * result + Arrays.GetHashCode(moreLikeFields);
             result = prime * result + J2N.BitConversion.SingleToInt32Bits(percentTermsToMatch);
-            // LUCENENET: wrap in Equatable to compare set contents
-            result = prime * result + ((stopWords == null) ? 0 : Equatable.Wrap(stopWords).GetHashCode());
+            // LUCENENET specific: use structural equality comparison
+            result = prime * result + ((stopWords == null) ? 0 : JCG.SetEqualityComparer<string>.Default.GetHashCode(stopWords));
             return result;
         }
 
@@ -227,8 +228,8 @@ namespace Lucene.Net.Queries.Mlt
                     return false;
                 }
             }
-            // LUCENENET: wrap in Equatable to compare set contents
-            else if (!Equatable.Wrap(stopWords).Equals(other.stopWords))
+            // LUCENENET specific: use structural equality comparison
+            else if (!JCG.SetEqualityComparer<string>.Default.Equals(stopWords, other.stopWords))
             {
                 return false;
             }
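
And the set flavor used for stopWords above, sketched with JCG.HashSet; note the null guard in GetHashCode carries over unchanged:

    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    ISet<string> x = new JCG.HashSet<string> { "a", "the" };
    ISet<string> y = new JCG.HashSet<string> { "the", "a" };

    bool equal = JCG.SetEqualityComparer<string>.Default.Equals(x, y);                // true: same elements
    int hash   = x == null ? 0 : JCG.SetEqualityComparer<string>.Default.GetHashCode(x);
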
diff --git a/src/Lucene.Net.Queries/TermsFilter.cs b/src/Lucene.Net.Queries/TermsFilter.cs
index 6d728ba..87301de 100644
--- a/src/Lucene.Net.Queries/TermsFilter.cs
+++ b/src/Lucene.Net.Queries/TermsFilter.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Queries
 
             public FieldAndTermEnumAnonymousInnerClassHelper(IList<Term> terms)
             {
-                if (!terms.Any())
+                if (terms.Count == 0)
                 {
                     throw new ArgumentException("no terms provided");
                 }
@@ -133,7 +133,7 @@ namespace Lucene.Net.Queries
         /// a single field.
         /// </summary>
         public TermsFilter(string field, params BytesRef[] terms)
-            : this(field, Arrays.AsList(terms))
+            : this(field, (IList<BytesRef>)terms)
         {
             // this ctor prevents unnecessary Term creations
         }
@@ -143,7 +143,7 @@ namespace Lucene.Net.Queries
         /// contain duplicate terms and multiple fields.
         /// </summary>
         public TermsFilter(params Term[] terms)
-            : this(terms.ToList())
+            : this((IList<Term>)terms)
         {
         }
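
The cast-based constructor delegation above works because single-dimensional arrays implement IList<T> in .NET; the cast avoids the copy that ToList()/Arrays.AsList() made, at the cost of the view being fixed-size. A sketch with strings standing in for Term/BytesRef:

    using System;
    using System.Collections.Generic;

    string[] terms = { "lucene", "net" };
    IList<string> view = terms;        // no copy: the array itself is the IList<string>
    Console.WriteLine(view[0]);        // "lucene"
    // view.Add("x");                  // would throw NotSupportedException: fixed-size collection
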
 
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index cafc31f..72e634d 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -1,11 +1,11 @@
-using J2N.Numerics;
+using J2N;
+using J2N.Numerics;
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.QueryParsers.Flexible.Standard;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FieldQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FieldQueryNode.cs
index c819a05..cda8b11 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FieldQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FieldQueryNode.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Parser;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using System.Globalization;
 using System.Text;
 
@@ -62,7 +62,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
          // LUCENENET specific overload for passing text as string
         public FieldQueryNode(string field, string text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -76,7 +76,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
          // LUCENENET specific overload for passing text as StringBuilder
         public FieldQueryNode(string field, StringBuilder text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -202,7 +202,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
         public virtual string Value
         {
             get { return Text.ToString(); }
-            set { Text = value.ToCharSequence(); }
+            set { Text = value.AsCharSequence(); }
         }
     }
 }
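
The ToCharSequence to AsCharSequence renames throughout this commit come from J2N.Text, which wraps string and StringBuilder sources as ICharSequence; a minimal sketch:

    using System.Text;
    using J2N.Text;

    ICharSequence a = "flexible".AsCharSequence();
    ICharSequence b = new StringBuilder("flexible").AsCharSequence();
    // Either can be handed to APIs that take ICharSequence, such as the FieldQueryNode overloads above.
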
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FuzzyQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FuzzyQueryNode.cs
index 2618ee3..2446c3c 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FuzzyQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/FuzzyQueryNode.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Parser;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using System.Text;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
@@ -42,7 +42,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
          // LUCENENET specific overload for string term
         public FuzzyQueryNode(string field, string term,
             float minSimilarity, int begin, int end)
-            : this(field, term.ToCharSequence(), minSimilarity, begin, end)
+            : this(field, term.AsCharSequence(), minSimilarity, begin, end)
         {
         }
 
@@ -57,7 +57,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
          // LUCENENET specific overload for StringBuilder term
         public FuzzyQueryNode(string field, StringBuilder term,
             float minSimilarity, int begin, int end)
-            : this(field, term.ToCharSequence(), minSimilarity, begin, end)
+            : this(field, term.AsCharSequence(), minSimilarity, begin, end)
         {
         }
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs
index da7019b..bc4fbd8 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Parser;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
@@ -195,7 +195,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 
             foreach (QueryText pathelement in GetPathElements(1))
             {
-                string value = escaper.Escape(new StringCharSequenceWrapper(pathelement.Value), 
+                string value = escaper.Escape(new StringCharSequence(pathelement.Value), 
                     CultureInfo.InvariantCulture, EscapeQuerySyntaxType.STRING).ToString();
                 path.Append("/\"").Append(value).Append("\"");
             }
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/QuotedFieldQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/QuotedFieldQueryNode.cs
index 0e37fde..7d6c6cd 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/QuotedFieldQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/QuotedFieldQueryNode.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Parser;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using System.Text;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
@@ -37,7 +37,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
         // LUCENENET specific overload for string text
         public QuotedFieldQueryNode(string field, string text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -51,7 +51,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
         // LUCENENET specific overload for StringBuilder text
         public QuotedFieldQueryNode(string field, StringBuilder text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TextableQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TextableQueryNode.cs
index 152a4a7..7d372ae 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TextableQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TextableQueryNode.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 {
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Parser/EscapeQuerySyntax.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Parser/EscapeQuerySyntax.cs
index 7ea7386..0f0ba43 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Parser/EscapeQuerySyntax.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Parser/EscapeQuerySyntax.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System.Globalization;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Parser
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs
index 9066f8f..b4a0926 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System.Globalization;
 using System.Text;
 
@@ -106,26 +106,22 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Util
         //    return this.chars[index];
         //}
 
+        bool ICharSequence.HasValue => this.chars != null;
 
-        public int Length
-        {
-            get { return this.chars.Length; }
-        }
+        public int Length => this.chars.Length;
 
         public char this[int index]
         {
-            get
-            {
-                return this.chars[index];
-            }
+            get => this.chars[index];
         }
 
-        public ICharSequence SubSequence(int start, int end)
+        public ICharSequence Subsequence(int startIndex, int length)
         {
-            int newLength = end - start;
+            // LUCENENET: Changed to have .NET semantics - startIndex/length rather than start/end
+            //int newLength = length - startIndex;
 
-            return new UnescapedCharSequence(this.chars, this.wasEscaped, start,
-                newLength);
+            return new UnescapedCharSequence(this.chars, this.wasEscaped, startIndex,
+                length);
         }
 
         public override string ToString()
@@ -184,7 +180,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Util
 
                 result.Append(this.chars[i]);
             }
-            return new StringCharSequenceWrapper(result.ToString());
+            return new StringCharSequence(result.ToString());
         }
 
         public bool WasEscaped(int index)
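
The SubSequence(start, end) to Subsequence(startIndex, length) change above follows .NET conventions; when porting a Java-style call, the second argument becomes end - start. A short sketch:

    using J2N.Text;

    ICharSequence text = "wildcard*".AsCharSequence();

    // Java style (exclusive end index):   text.subSequence(0, text.length() - 1)
    // .NET style (start index + length):
    ICharSequence trimmed = text.Subsequence(0, text.Length - 1); // "wildcard"
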
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/FuzzyQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/FuzzyQueryNodeBuilder.cs
index 6ae4c99..ac9d0c2 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/FuzzyQueryNodeBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/FuzzyQueryNodeBuilder.cs
@@ -1,7 +1,7 @@
-using Lucene.Net.Index;
+using J2N;
+using Lucene.Net.Index;
 using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
 {
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
index 38eb07b..6da62e5 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
@@ -4,6 +4,7 @@ using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
 using Lucene.Net.Search;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
 {
@@ -45,7 +46,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
 
             if (children != null)
             {
-                IDictionary<int?, List<Term>> positionTermMap = new SortedDictionary<int?, List<Term>>();
+                IDictionary<int?, List<Term>> positionTermMap = new JCG.SortedDictionary<int?, List<Term>>();
 
                 foreach (IQueryNode child in children)
                 {
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/PrefixWildcardQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/PrefixWildcardQueryNodeBuilder.cs
index 3c2caa1..cf28f48 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/PrefixWildcardQueryNodeBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/PrefixWildcardQueryNodeBuilder.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
         {
             PrefixWildcardQueryNode wildcardNode = (PrefixWildcardQueryNode)queryNode;
 
-            string text = wildcardNode.Text.SubSequence(0, wildcardNode.Text.Length - 1).ToString();
+            string text = wildcardNode.Text.Subsequence(0, wildcardNode.Text.Length - 1).ToString(); // LUCENENET: Checked 2nd Subsequence parameter
             PrefixQuery q = new PrefixQuery(new Term(wildcardNode.GetFieldAsString(), text));
 
             MultiTermQuery.RewriteMethod method = (MultiTermQuery.RewriteMethod)queryNode.GetTag(MultiTermRewriteMethodProcessor.TAG_ID);
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
index c53a8b8..149102e 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
@@ -1,6 +1,5 @@
 using Lucene.Net.Support;
 using System;
-using System.Collections.Generic;
 using System.Globalization;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
index 96ceb70..5d9994f 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
@@ -2,10 +2,10 @@
 using Lucene.Net.Documents;
 using Lucene.Net.QueryParsers.Flexible.Core.Config;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 using Operator = Lucene.Net.QueryParsers.Flexible.Standard.Config.StandardQueryConfigHandler.Operator;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
@@ -50,11 +50,11 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
             Set(ConfigurationKeys.PHRASE_SLOP, 0); //default value 2.4
             Set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, true); //default value 2.4
             Set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, false); //default value 2.4
-            Set(ConfigurationKeys.FIELD_BOOST_MAP, new LinkedHashMap<string, float?>());
+            Set(ConfigurationKeys.FIELD_BOOST_MAP, new JCG.LinkedDictionary<string, float?>());
             Set(ConfigurationKeys.FUZZY_CONFIG, new FuzzyConfig());
             Set(ConfigurationKeys.LOCALE, null);
             Set(ConfigurationKeys.MULTI_TERM_REWRITE_METHOD, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
-            Set(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP, new HashMap<string, DateTools.Resolution?>());
+            Set(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP, new JCG.Dictionary<string, DateTools.Resolution?>());
         }
 
         /// <summary>
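
On the FIELD_BOOST_MAP change above: JCG.LinkedDictionary is the J2N counterpart of the LinkedHashMap it replaces, so the assumption here is that enumeration follows insertion order; a small sketch of that expectation:

    using System;
    using JCG = J2N.Collections.Generic;

    var boosts = new JCG.LinkedDictionary<string, float?>();
    boosts["title"] = 2.0f;
    boosts["body"] = 1.0f;

    foreach (var pair in boosts)
        Console.WriteLine($"{pair.Key}={pair.Value}"); // title=2, then body=1 (insertion order, assumed)
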
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/PrefixWildcardQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/PrefixWildcardQueryNode.cs
index c512efc..28300fd 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/PrefixWildcardQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/PrefixWildcardQueryNode.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using System.Text;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
@@ -39,7 +39,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         // LUCENENET specific overload for passing text as string
         public PrefixWildcardQueryNode(string field, string text,
             int begin, int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -53,7 +53,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         // LUCENENET specific overload for passing text as StringBuilder
         public PrefixWildcardQueryNode(string field, StringBuilder text,
             int begin, int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/RegexpQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/RegexpQueryNode.cs
index e81b948..48acb9a 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/RegexpQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/RegexpQueryNode.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Parser;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Text;
 
@@ -41,7 +41,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         // LUCENENET specific overload for passing text as string
         public RegexpQueryNode(string field, string text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -55,7 +55,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         // LUCENENET specific overload for passing text as StringBuilder
         public RegexpQueryNode(string field, StringBuilder text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -67,10 +67,10 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         /// <param name="begin">position in the query string</param>
         /// <param name="end">position in the query string</param>
         public RegexpQueryNode(string field, ICharSequence text, int begin,
-            int end)
+            int end) // LUCENENET TODO: API - Change to use length rather than end index to match .NET
         {
             this.field = field;
-            this.text = text.SubSequence(begin, end);
+            this.text = text.Subsequence(begin, end - begin);
         }
 
         public virtual BytesRef TextToBytesRef()
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/WildcardQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/WildcardQueryNode.cs
index b1bb957..e3749ff 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/WildcardQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/WildcardQueryNode.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Parser;
-using Lucene.Net.Support;
 using System.Text;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
@@ -38,7 +38,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         // LUCENENET specific overload for passing text as string
         public WildcardQueryNode(string field, string text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
@@ -52,7 +52,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
         // LUCENENET specific overload for passing text as StringBuilder
         public WildcardQueryNode(string field, StringBuilder text, int begin,
             int end)
-            : this(field, text.ToCharSequence(), begin, end)
+            : this(field, text.AsCharSequence(), begin, end)
         {
         }
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs
index 9d6b660..251b227 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs
@@ -1,8 +1,8 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Messages;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Messages;
 using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using Lucene.Net.QueryParsers.Flexible.Core.Util;
 using Lucene.Net.QueryParsers.Flexible.Messages;
-using Lucene.Net.Support;
 using System;
 using System.Globalization;
 using System.Text;
@@ -65,8 +65,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
             {
                 if (buffer[0] == escapableTermExtraFirstChars[i][0])
                 {
-                    buffer = new StringCharSequenceWrapper("\\" + buffer[0]
-                        + buffer.SubSequence(1, buffer.Length).ToString());
+                    buffer = new StringCharSequence("\\" + buffer[0]
+                        + buffer.Subsequence(1, buffer.Length - 1).ToString()); // LUCENENET: Corrected 2nd Subsequence parameter
                     break;
                 }
             }
@@ -102,7 +102,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
             for (int i = 0; i < escapableWordTokens.Length; i++)
             {
                 if (escapableWordTokens[i].Equals(term.ToString(), StringComparison.OrdinalIgnoreCase))
-                    return new StringCharSequenceWrapper("\\" + term);
+                    return new StringCharSequence("\\" + term);
             }
             return term;
         }
@@ -134,7 +134,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                     result2.Append(@string[i]);
                     result2.Append(escapeChar);
                 }
-                return result2.ToString().ToCharSequence();
+                return result2.ToString().AsCharSequence();
             }
 
             // normal case
@@ -177,7 +177,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
             if (result.Length == 0 && copyStart == 0)
                 return @string;
             result.Append(@string.ToString().Substring(copyStart));
-            return result.ToString().ToCharSequence();
+            return result.ToString().AsCharSequence();
         }
 
         /// <summary>
@@ -208,7 +208,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
             if (text == null || text.Length == 0)
                 return text;
 
-            return Escape(text.ToCharSequence(), locale, type).ToString();
+            return Escape(text.AsCharSequence(), locale, type).ToString();
         }
 
         public virtual ICharSequence Escape(ICharSequence text, CultureInfo locale, EscapeQuerySyntaxType type)  
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
index 49fdce1..f2ab884 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
@@ -1,10 +1,10 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
@@ -190,7 +190,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                         // safe to ignore, because we know the number of tokens
                     }
 
-                    fieldNode.Text = term.ToCharSequence();
+                    fieldNode.Text = term.AsCharSequence();
 
                     return fieldNode;
                 }
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
index 9001d3e..f91ee57 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
@@ -1,9 +1,9 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Core.Util;
 using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
-using Lucene.Net.Support;
 using System.Collections.Generic;
 using System.Globalization;
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs
index 9eded07..261d56b 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs
@@ -1,8 +1,8 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Core.Util;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 
@@ -48,14 +48,14 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                     && (!(upperText is UnescapedCharSequence) || !((UnescapedCharSequence)upperText)
                         .WasEscaped(0)))
                 {
-                    upperText = "".ToCharSequence();
+                    upperText = "".AsCharSequence();
                 }
 
                 if (OPEN_RANGE_TOKEN.Equals(lowerNode.GetTextAsString(), StringComparison.Ordinal)
                     && (!(lowerText is UnescapedCharSequence) || !((UnescapedCharSequence)lowerText)
                         .WasEscaped(0)))
                 {
-                    lowerText = "".ToCharSequence();
+                    lowerText = "".AsCharSequence();
                 }
 
                 lowerNode.Text = lowerText;
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
index a44f2e1..56ad89e 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
@@ -1,10 +1,10 @@
-using Lucene.Net.Documents;
+using J2N.Text;
+using Lucene.Net.Documents;
 using Lucene.Net.QueryParsers.Flexible.Core.Config;
 using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
@@ -111,7 +111,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                     if (DateTime.TryParseExact(part1, shortDateFormat, locale, DateTimeStyles.None, out d1))
                     {
                         part1 = DateTools.DateToString(d1, dateRes);
-                        lower.Text = new StringCharSequenceWrapper(part1);
+                        lower.Text = new StringCharSequence(part1);
                     }
 
                     if (DateTime.TryParseExact(part2, shortDateFormat, locale, DateTimeStyles.None, out d2))
@@ -139,7 +139,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                         }
 
                         part2 = DateTools.DateToString(d2, dateRes);
-                        upper.Text = new StringCharSequenceWrapper(part2);
+                        upper.Text = new StringCharSequence(part2);
                     }
 
                 }
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs
index 8b0f176..51e78a2 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs
@@ -1,8 +1,8 @@
-using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
+using J2N.Text;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Core.Util;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
-using Lucene.Net.Support;
 using System.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
@@ -84,7 +84,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
             // start at the end since it's more common to put wildcards at the end
             for (int i = text.Length - 1; i >= 0; i--)
             {
-                if ((text[i] == '*' || text[i] == '?') && !UnescapedCharSequence.WasEscaped(new StringCharSequenceWrapper(text), i))
+                if ((text[i] == '*' || text[i] == '?') && !UnescapedCharSequence.WasEscaped(new StringCharSequence(text), i))
                 {
                     return true;
                 }
@@ -100,7 +100,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
             // Validate last character is a '*' and was not escaped
             // If a single '*', it is a wildcard, not a prefix, to simulate the old queryparser
             if (text[text.Length - 1] != '*') return false;
-            if (UnescapedCharSequence.WasEscaped(new StringCharSequenceWrapper(text), text.Length - 1)) return false;
+            if (UnescapedCharSequence.WasEscaped(new StringCharSequence(text), text.Length - 1)) return false;
             if (text.Length == 1) return false;
 
             // Only make a prefix if there is only one single star at the end and no '?' or '*' characters
@@ -108,7 +108,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
             for (int i = 0; i < text.Length; i++)
             {
                 if (text[i] == '?') return false;
-                if (text[i] == '*' && !UnescapedCharSequence.WasEscaped(new StringCharSequenceWrapper(text), i))
+                if (text[i] == '*' && !UnescapedCharSequence.WasEscaped(new StringCharSequence(text), i))
                 {
                     if (i == text.Length - 1)
                         return true;
diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
index 80ea56d..52e3f2c 100644
--- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
@@ -1,13 +1,13 @@
 using Lucene.Net.Analysis;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Simple
 {
@@ -131,7 +131,7 @@ namespace Lucene.Net.QueryParsers.Simple
 
         /// <summary>Creates a new parser searching over a single field.</summary>
         public SimpleQueryParser(Analyzer analyzer, string field)
-            : this(analyzer, new HashMap<string, float>() { { field, 1.0F } })
+            : this(analyzer, new JCG.Dictionary<string, float>() { { field, 1.0F } })
         {
         }
 
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
index 8f3800b..ac786e5 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -1,5 +1,4 @@
 using Lucene.Net.Support.IO;
-using System;
 using System.Diagnostics.CodeAnalysis;
 using System.IO;
 
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
index 8657092..208d244 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Index;
-using Lucene.Net.Support;
+using J2N.Text;
+using Lucene.Net.Index;
 using System;
 using System.Text;
 
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
index 6bcd67b..e2f69f7 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
@@ -1,8 +1,8 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search.Spans;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Surround.Query
 {
@@ -64,7 +64,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query
         public SpanNearClauseFactory(IndexReader reader, string fieldName, BasicQueryFactory qf) {
             this.reader = reader;
             this.fieldName = fieldName;
-            this.weightBySpanQuery = new HashMap<SpanQuery, float>();
+            this.weightBySpanQuery = new JCG.Dictionary<SpanQuery, float>();
             this.qf = qf;
           }
 
diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs
index 88015da..15e6a7c 100644
--- a/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs
@@ -1,13 +1,14 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Queries.Mlt;
 using Lucene.Net.Search;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Xml.Builders
 {
@@ -68,10 +69,10 @@ namespace Lucene.Net.QueryParsers.Xml.Builders
             //TODO MoreLikeThis needs to ideally have per-field stopWords lists - until then
             //I use all analyzers/fields to generate multi-field compatible stop list
             string stopWords = e.GetAttribute("stopWords");
-            HashSet<string> stopWordsSet = null;
+            ISet<string> stopWordsSet = null;
             if ((stopWords != null) && (fields != null))
             {
-                stopWordsSet = new HashSet<string>();
+                stopWordsSet = new JCG.HashSet<string>();
                 foreach (string field in fields)
                 {
                     TokenStream ts = null;
diff --git a/src/Lucene.Net.Replicator/Http/ReplicationService.cs b/src/Lucene.Net.Replicator/Http/ReplicationService.cs
index 3f09a6e..7e7de7e 100644
--- a/src/Lucene.Net.Replicator/Http/ReplicationService.cs
+++ b/src/Lucene.Net.Replicator/Http/ReplicationService.cs
@@ -1,6 +1,6 @@
 using J2N.IO;
+using J2N.Text;
 using Lucene.Net.Replicator.Http.Abstractions;
-using Lucene.Net.Support;
 using Newtonsoft.Json;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs
index ba6e90e..e28759e 100644
--- a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs
+++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs
@@ -1,8 +1,8 @@
-using Lucene.Net.Facet.Taxonomy.Directory;
+using J2N.Text;
+using Lucene.Net.Facet.Taxonomy.Directory;
 using Lucene.Net.Facet.Taxonomy.WriterCache;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
-using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
diff --git a/src/Lucene.Net.Replicator/IndexReplicationHandler.cs b/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
index d941527..20f3f33 100644
--- a/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
+++ b/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Replicator
@@ -165,7 +166,7 @@ namespace Lucene.Net.Replicator
 
                 if (commit != null && commit.SegmentsFileName.Equals(segmentsFile, StringComparison.Ordinal))
                 {
-                    HashSet<string> commitFiles = new HashSet<string>( commit.FileNames
+                    ISet<string> commitFiles = new JCG.HashSet<string>( commit.FileNames
                         .Union(new[] {IndexFileNames.SEGMENTS_GEN}));
 
                     Regex matcher = IndexFileNames.CODEC_FILE_PATTERN;
diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs
index 7388093..2a2c4f4 100644
--- a/src/Lucene.Net.Replicator/ReplicationClient.cs
+++ b/src/Lucene.Net.Replicator/ReplicationClient.cs
@@ -8,6 +8,7 @@ using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Replicator
@@ -360,7 +361,7 @@ namespace Lucene.Net.Replicator
             foreach (KeyValuePair<string, IList<RevisionFile>> pair in handlerRevisionFiles)
             {
                 // put the handler files in a Set, for faster contains() checks later
-                HashSet<string> handlerFiles = new HashSet<string>(pair.Value.Select(v => v.FileName));
+                ISet<string> handlerFiles = new JCG.HashSet<string>(pair.Value.Select(v => v.FileName));
 
                 // make sure to preserve revisionFiles order
                 string source = pair.Key;
diff --git a/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs b/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs
index c44b033..555f3d9 100644
--- a/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs
+++ b/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs
@@ -3,10 +3,10 @@ using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Search.Similarities;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Sandbox.Queries
 {
@@ -50,7 +50,7 @@ namespace Lucene.Net.Sandbox.Queries
         // provided to TermQuery, so that the general idea is agnostic to any scoring system...
         internal static TFIDFSimilarity sim = new DefaultSimilarity();
         private Query rewrittenQuery = null;
-        private IList<FieldVals> fieldVals = new EquatableList<FieldVals>();
+        private IList<FieldVals> fieldVals = new JCG.List<FieldVals>();
         private Analyzer analyzer;
 
         private ScoreTermQueue q;
@@ -202,7 +202,7 @@ namespace Lucene.Net.Sandbox.Queries
                 ICharTermAttribute termAtt = ts.AddAttribute<ICharTermAttribute>();
 
                 int corpusNumDocs = reader.NumDocs;
-                HashSet<string> processedTerms = new HashSet<string>();
+                ISet<string> processedTerms = new JCG.HashSet<string>();
                 ts.Reset();
                 while (ts.IncrementToken())
                 {
diff --git a/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs
index 84b08dd..681a52c 100644
--- a/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs
+++ b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N;
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
diff --git a/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
index 0287eaa..c00bf92 100644
--- a/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
@@ -1,7 +1,6 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Spatial.Prefix.Tree;
-using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Spatial4n.Core.Shapes;
 
diff --git a/src/Lucene.Net.Spatial/Properties/AssemblyInfo.cs b/src/Lucene.Net.Spatial/Properties/AssemblyInfo.cs
index d5d0fc8..fa964fd 100644
--- a/src/Lucene.Net.Spatial/Properties/AssemblyInfo.cs
+++ b/src/Lucene.Net.Spatial/Properties/AssemblyInfo.cs
@@ -20,7 +20,6 @@ using System;
 using System.Reflection;
 using System.Runtime.CompilerServices;
 using System.Runtime.InteropServices;
-using System.Security;
 
 // General Information about an assembly is controlled through the following 
 // set of attributes. Change these attribute values to modify the information
diff --git a/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs b/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs
index eae1e4f..c242518 100644
--- a/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs
+++ b/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs
@@ -1,8 +1,8 @@
 using Lucene.Net.Index;
 using Lucene.Net.Queries.Function;
-using Lucene.Net.Support;
 using System.Collections;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Util
 {
@@ -36,7 +36,7 @@ namespace Lucene.Net.Spatial.Util
         public CachingDoubleValueSource(ValueSource source)
         {
             this.m_source = source;
-            m_cache = new HashMap<int, double>();
+            m_cache = new JCG.Dictionary<int, double>();
         }
 
         public override string GetDescription()
diff --git a/src/Lucene.Net.Suggest/Spell/CombineSuggestion.cs b/src/Lucene.Net.Suggest/Spell/CombineSuggestion.cs
index 737adab..695e9f7 100644
--- a/src/Lucene.Net.Suggest/Spell/CombineSuggestion.cs
+++ b/src/Lucene.Net.Suggest/Spell/CombineSuggestion.cs
@@ -4,21 +4,21 @@ using System.Diagnostics.CodeAnalysis;
 namespace Lucene.Net.Search.Spell
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// <para>A suggestion generated by combining one or more original query terms</para>
diff --git a/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs b/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
index 910c1ad..e03580c 100644
--- a/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
+++ b/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N;
+using Lucene.Net.Index;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
@@ -6,6 +7,7 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Spell
 {
@@ -114,17 +116,14 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual int MaxEdits
         {
-            get
-            {
-                return maxEdits;
-            }
+            get => maxEdits;
             set
             {
                 if (value < 1 || value > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE)
                 {
                     throw new NotSupportedException("Invalid maxEdits");
                 }
-                this.maxEdits = value;
+                maxEdits = value;
             }
         }
 
@@ -137,14 +136,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual int MinPrefix
         {
-            get
-            {
-                return minPrefix;
-            }
-            set
-            {
-                this.minPrefix = value;
-            }
+            get => minPrefix;
+            set => minPrefix = value;
         }
 
 
@@ -156,14 +149,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual int MaxInspections
         {
-            get
-            {
-                return maxInspections;
-            }
-            set
-            {
-                this.maxInspections = value;
-            }
+            get => maxInspections;
+            set => maxInspections = value;
         }
 
 
@@ -173,14 +160,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual float Accuracy
         {
-            get
-            {
-                return accuracy;
-            }
-            set
-            {
-                this.accuracy = value;
-            }
+            get => accuracy;
+            set => accuracy = value;
         }
 
 
@@ -198,17 +179,14 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual float ThresholdFrequency
         {
-            get
-            {
-                return thresholdFrequency;
-            }
+            get => thresholdFrequency;
             set
             {
                 if (value >= 1f && value != (int)value)
                 {
                     throw new System.ArgumentException("Fractional absolute document frequencies are not allowed");
                 }
-                this.thresholdFrequency = value;
+                thresholdFrequency = value;
             }
         }
 
@@ -221,14 +199,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual int MinQueryLength
         {
-            get
-            {
-                return minQueryLength;
-            }
-            set
-            {
-                this.minQueryLength = value;
-            }
+            get => minQueryLength;
+            set => minQueryLength = value;
         }
 
 
@@ -246,17 +218,14 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual float MaxQueryFrequency
         {
-            get
-            {
-                return maxQueryFrequency;
-            }
+            get => maxQueryFrequency;
             set
             {
                 if (value >= 1f && value != (int)value)
                 {
                     throw new System.ArgumentException("Fractional absolute document frequencies are not allowed");
                 }
-                this.maxQueryFrequency = value;
+                maxQueryFrequency = value;
             }
         }
 
@@ -273,14 +242,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual bool LowerCaseTerms
         {
-            get
-            {
-                return lowerCaseTerms;
-            }
-            set
-            {
-                this.lowerCaseTerms = value;
-            }
+            get => lowerCaseTerms;
+            set => lowerCaseTerms = value;
         }
 
         /// <summary>
@@ -289,14 +252,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual CultureInfo LowerCaseTermsCulture // LUCENENET specific
         {
-            get
-            {
-                return lowerCaseTermsCulture ?? CultureInfo.CurrentCulture;
-            }
-            set
-            {
-                lowerCaseTermsCulture = value;
-            }
+            get => lowerCaseTermsCulture ?? CultureInfo.CurrentCulture;
+            set => lowerCaseTermsCulture = value;
         }
 
         /// <summary>
@@ -305,14 +262,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual IComparer<SuggestWord> Comparer
         {
-            get
-            {
-                return comparer;
-            }
-            set
-            {
-                this.comparer = value;
-            }
+            get => comparer;
+            set => comparer = value;
         }
 
 
@@ -328,14 +279,8 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         public virtual IStringDistance Distance
         {
-            get
-            {
-                return distance;
-            }
-            set
-            {
-                this.distance = value;
-            }
+            get => distance;
+            set => distance = value;
         }
 
 
@@ -427,7 +372,7 @@ namespace Lucene.Net.Search.Spell
             terms = SuggestSimilar(term, inspections, ir, docfreq, 1, accuracy, spare);
             if (maxEdits > 1 && terms.Count() < inspections)
             {
-                var moreTerms = new HashSet<ScoreTerm>();
+                var moreTerms = new JCG.HashSet<ScoreTerm>();
                 moreTerms.UnionWith(terms);
                 moreTerms.UnionWith(SuggestSimilar(term, inspections, ir, docfreq, maxEdits, accuracy, spare));
                 terms = moreTerms;
@@ -486,7 +431,7 @@ namespace Lucene.Net.Search.Spell
             }
             FuzzyTermsEnum e = new FuzzyTermsEnum(terms, atts, term, editDistance, Math.Max(minPrefix, editDistance - 1), true);
 
-            var stQueue = new Support.PriorityQueue<ScoreTerm>();
+            var stQueue = new JCG.PriorityQueue<ScoreTerm>();
 
             BytesRef queryTerm = new BytesRef(term.Text());
             BytesRef candidateTerm;
@@ -542,9 +487,9 @@ namespace Lucene.Net.Search.Spell
                 st.Docfreq = df;
                 st.TermAsString = termAsString;
                 st.Score = score;
-                stQueue.Offer(st);
+                stQueue.Enqueue(st);
                 // possibly drop entries from queue
-                st = (stQueue.Count > numSug) ? stQueue.Poll() : new ScoreTerm();
+                st = (stQueue.Count > numSug) ? stQueue.Dequeue() : new ScoreTerm();
                 maxBoostAtt.MaxNonCompetitiveBoost = (stQueue.Count >= numSug) ? stQueue.Peek().Boost : float.NegativeInfinity;
             }
 
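    The DirectSpellChecker hunks above replace Support.PriorityQueue's Offer/Poll with
    J2N's Enqueue/Dequeue and keep Peek for the competitive-boost floor. A minimal
    sketch of that bounded-queue pattern follows, using only the calls visible in the
    diff (parameterless constructor, Enqueue, Dequeue, Peek, Count) and assuming
    natural ordering via IComparable<T> when no comparer is supplied; Candidate and
    TopN are hypothetical stand-ins for ScoreTerm and the suggestion loop.

        using System;
        using JCG = J2N.Collections.Generic;

        // Stand-in for ScoreTerm: lower Score sorts first, so the queue head
        // is always the weakest surviving candidate.
        public class Candidate : IComparable<Candidate>
        {
            public string Term { get; set; }
            public float Score { get; set; }
            public int CompareTo(Candidate other) => Score.CompareTo(other.Score);
        }

        public static class BoundedQueueDemo
        {
            public static Candidate[] TopN(Candidate[] input, int max)
            {
                var queue = new JCG.PriorityQueue<Candidate>();
                foreach (var c in input)
                {
                    queue.Enqueue(c);        // add the candidate
                    if (queue.Count > max)
                        queue.Dequeue();     // over budget: drop the weakest
                }

                // Peek reads the head without removing it, the same way the
                // spell checker derives its MaxNonCompetitiveBoost cutoff.
                if (queue.Count > 0)
                    Console.WriteLine($"cutoff score: {queue.Peek().Score}");

                var result = new Candidate[queue.Count];
                for (int i = queue.Count - 1; i >= 0; i--)
                    result[i] = queue.Dequeue();   // drain weakest-to-strongest
                return result;
            }
        }
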
diff --git a/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs b/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs
index 4010f80..c385f88 100644
--- a/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs
+++ b/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs
@@ -1,6 +1,7 @@
-using Lucene.Net.Support;
+using J2N;
 using Lucene.Net.Util;
 using System;
+using RectangularArrays = Lucene.Net.Support.RectangularArrays;
 
 namespace Lucene.Net.Search.Spell
 {
diff --git a/src/Lucene.Net.Suggest/Spell/SuggestWordFrequencyComparator.cs b/src/Lucene.Net.Suggest/Spell/SuggestWordFrequencyComparator.cs
index 202ea68..9720a03 100644
--- a/src/Lucene.Net.Suggest/Spell/SuggestWordFrequencyComparator.cs
+++ b/src/Lucene.Net.Suggest/Spell/SuggestWordFrequencyComparator.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Search.Spell
diff --git a/src/Lucene.Net.Suggest/Spell/SuggestWordScoreComparator.cs b/src/Lucene.Net.Suggest/Spell/SuggestWordScoreComparator.cs
index 6306f0f..eb27c5b 100644
--- a/src/Lucene.Net.Suggest/Spell/SuggestWordScoreComparator.cs
+++ b/src/Lucene.Net.Suggest/Spell/SuggestWordScoreComparator.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using J2N.Text;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Search.Spell
diff --git a/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs b/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs
index 40c99d0..ee55587 100644
--- a/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs
+++ b/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs
@@ -1,8 +1,10 @@
-using Lucene.Net.Index;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Index;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
+using JCG = J2N.Collections.Generic;
+using WritableArrayAttribute = Lucene.Net.Support.WritableArrayAttribute;
 
 namespace Lucene.Net.Search.Spell
 {
@@ -97,7 +99,7 @@ namespace Lucene.Net.Search.Spell
             IComparer<SuggestWordArrayWrapper> queueComparer = sortMethod == BreakSuggestionSortMethod.NUM_CHANGES_THEN_MAX_FREQUENCY 
                 ? (IComparer<SuggestWordArrayWrapper>)new LengthThenMaxFreqComparer(this) 
                 : new LengthThenSumFreqComparer(this);
-            PriorityQueue<SuggestWordArrayWrapper> suggestions = new PriorityQueue<SuggestWordArrayWrapper>(queueInitialCapacity, queueComparer);
+            JCG.PriorityQueue<SuggestWordArrayWrapper> suggestions = new JCG.PriorityQueue<SuggestWordArrayWrapper>(queueInitialCapacity, queueComparer);
 
             int origFreq = ir.DocFreq(term);
             if (origFreq > 0 && suggestMode == SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX)
@@ -116,7 +118,7 @@ namespace Lucene.Net.Search.Spell
             SuggestWord[][] suggestionArray = new SuggestWord[suggestions.Count][];
             for (int i = suggestions.Count - 1; i >= 0; i--)
             {
-                suggestionArray[i] = suggestions.Remove().SuggestWords;
+                suggestionArray[i] = suggestions.Dequeue().SuggestWords;
             }
 
             return suggestionArray;
@@ -169,7 +171,7 @@ namespace Lucene.Net.Search.Spell
 
             int queueInitialCapacity = maxSuggestions > 10 ? 10 : maxSuggestions;
             IComparer<CombineSuggestionWrapper> queueComparer = new CombinationsThenFreqComparer(this);
-            PriorityQueue<CombineSuggestionWrapper> suggestions = new PriorityQueue<CombineSuggestionWrapper>(queueInitialCapacity, queueComparer);
+            JCG.PriorityQueue<CombineSuggestionWrapper> suggestions = new JCG.PriorityQueue<CombineSuggestionWrapper>(queueInitialCapacity, queueComparer);
 
             int thisTimeEvaluations = 0;
             for (int i = 0; i < terms.Length - 1; i++)
@@ -234,10 +236,10 @@ namespace Lucene.Net.Search.Spell
                                 word.Score = origIndexes.Length - 1;
                                 word.String = combinedTerm.Text();
                                 CombineSuggestionWrapper suggestion = new CombineSuggestionWrapper(this, new CombineSuggestion(word, origIndexes), (origIndexes.Length - 1));
-                                suggestions.Offer(suggestion);
+                                suggestions.Enqueue(suggestion);
                                 if (suggestions.Count > maxSuggestions)
                                 {
-                                    suggestions.Poll();
+                                    suggestions.TryDequeue(out CombineSuggestionWrapper _);
                                 }
                             }
                         }
@@ -252,14 +254,14 @@ namespace Lucene.Net.Search.Spell
             CombineSuggestion[] combineSuggestions = new CombineSuggestion[suggestions.Count];
             for (int i = suggestions.Count - 1; i >= 0; i--)
             {
-                combineSuggestions[i] = suggestions.Remove().CombineSuggestion;
+                combineSuggestions[i] = suggestions.Dequeue().CombineSuggestion;
             }
             return combineSuggestions;
         }
 
         private int GenerateBreakUpSuggestions(Term term, IndexReader ir, 
             int numberBreaks, int maxSuggestions, int useMinSuggestionFrequency, 
-            SuggestWord[] prefix, PriorityQueue<SuggestWordArrayWrapper> suggestions, 
+            SuggestWord[] prefix, JCG.PriorityQueue<SuggestWordArrayWrapper> suggestions, 
             int totalEvaluations, BreakSuggestionSortMethod sortMethod)
         {
             string termText = term.Text();
@@ -288,10 +290,10 @@ namespace Lucene.Net.Search.Spell
                     if (rightWord.Freq >= useMinSuggestionFrequency)
                     {
                         SuggestWordArrayWrapper suggestion = new SuggestWordArrayWrapper(this, NewSuggestion(prefix, leftWord, rightWord));
-                        suggestions.Offer(suggestion);
+                        suggestions.Enqueue(suggestion);
                         if (suggestions.Count > maxSuggestions)
                         {
-                            suggestions.Poll();
+                            suggestions.Dequeue();
                         }
                     }
                     int newNumberBreaks = numberBreaks + 1;
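
    The WordBreakSpellChecker hunks above show the other two J2N queue calls this
    patch relies on: the (capacity, comparer) constructor and TryDequeue for
    discarding overflow without Dequeue's throw-on-empty behavior. A small sketch
    under those assumptions; Wrapped and LengthThenFreq are hypothetical stand-ins
    for SuggestWordArrayWrapper and its comparers.

        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        public class Wrapped
        {
            public int Length { get; set; }
            public int Freq { get; set; }
        }

        // Orders by length, then frequency, so the head is the least useful entry.
        public class LengthThenFreq : IComparer<Wrapped>
        {
            public int Compare(Wrapped x, Wrapped y)
            {
                int byLength = x.Length.CompareTo(y.Length);
                return byLength != 0 ? byLength : x.Freq.CompareTo(y.Freq);
            }
        }

        public static class ComparerQueueDemo
        {
            public static void Run(IEnumerable<Wrapped> candidates, int maxSuggestions)
            {
                // Same constructor shape as the patch: initial capacity plus comparer.
                var suggestions = new JCG.PriorityQueue<Wrapped>(10, new LengthThenFreq());

                foreach (var candidate in candidates)
                {
                    suggestions.Enqueue(candidate);
                    if (suggestions.Count > maxSuggestions)
                        suggestions.TryDequeue(out Wrapped _);   // silently drop the head
                }
            }
        }
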
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
index 22f2dd6..8c25f86 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
@@ -12,6 +12,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Search.Suggest.Analyzing
@@ -419,7 +420,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             TokenStream ts = null;
             BooleanQuery query;
-            var matchedTokens = new HashSet<string>();
+            var matchedTokens = new JCG.HashSet<string>();
             string prefixToken = null;
 
             try
@@ -433,7 +434,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 string lastToken = null;
                 query = new BooleanQuery();
                 int maxEndOffset = -1;
-                matchedTokens = new HashSet<string>();
+                matchedTokens = new JCG.HashSet<string>();
                 while (ts.IncrementToken())
                 {
                     if (lastToken != null)
@@ -576,10 +577,10 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // Must look up sorted-set by segment:
                 int segment = ReaderUtil.SubIndex(fd.Doc, leaves);
                 SortedSetDocValues contextsDV = leaves[segment].AtomicReader.GetSortedSetDocValues(CONTEXTS_FIELD_NAME);
-                HashSet<BytesRef> contexts;
+                ISet<BytesRef> contexts;
                 if (contextsDV != null)
                 {
-                    contexts = new HashSet<BytesRef>();
+                    contexts = new JCG.HashSet<BytesRef>();
                     contextsDV.SetDocument(fd.Doc - leaves[segment].DocBase);
                     long ord;
                     while ((ord = contextsDV.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
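
    In AnalyzingInfixSuggester (and the files that follow) the pattern is to keep
    fields and locals typed to the BCL ISet<T> interface and to name JCG.HashSet<T>
    only at the construction site. A short sketch of that shape, restricted to
    members the diff itself exercises (the parameterless constructor and UnionWith)
    plus ordinary ISet<T> calls; CollectTokens is a hypothetical helper, not a
    method from the suggester.

        using System.Collections.Generic;
        using JCG = J2N.Collections.Generic;

        public static class SetDemo
        {
            public static ISet<string> CollectTokens(IEnumerable<string> analyzed, IEnumerable<string> matched)
            {
                // Concrete type appears only here; callers see ISet<string>.
                ISet<string> tokens = new JCG.HashSet<string>();
                tokens.UnionWith(analyzed);   // same ISet<T> member the DirectSpellChecker hunk uses
                tokens.UnionWith(matched);
                return tokens;
            }
        }
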
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
index 693f8b3..5076209 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
@@ -9,25 +9,26 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Analyzing
 {
     /*
-	 * Licensed to the Apache Software Foundation (ASF) under one or more
-	 * contributor license agreements.  See the NOTICE file distributed with
-	 * this work for additional information regarding copyright ownership.
-	 * The ASF licenses this file to You under the Apache License, Version 2.0
-	 * (the "License"); you may not use this file except in compliance with
-	 * the License.  You may obtain a copy of the License at
-	 *
-	 *     http://www.apache.org/licenses/LICENSE-2.0
-	 *
-	 * Unless required by applicable law or agreed to in writing, software
-	 * distributed under the License is distributed on an "AS IS" BASIS,
-	 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	 * See the License for the specific language governing permissions and
-	 * limitations under the License.
-	 */
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     /// <summary>
     /// Suggester that first analyzes the surface form, adds the
@@ -515,7 +516,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // still index the hightest-weight one).  We clear
                 // this when we see a new analyzed form, so it cannot
                 // grow unbounded (at most 256 entries):
-                var seenSurfaceForms = new HashSet<BytesRef>();
+                var seenSurfaceForms = new JCG.HashSet<BytesRef>();
 
                 var dedup = 0;
                 while (reader.Read(scratch))
@@ -888,10 +889,10 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 this.outerInstance = outerInstance;
                 this.utf8Key = utf8Key;
                 this.results = results;
-                seen = new HashSet<BytesRef>();
+                seen = new JCG.HashSet<BytesRef>();
             }
 
-            private readonly HashSet<BytesRef> seen;
+            private readonly ISet<BytesRef> seen;
 
             protected override bool AcceptResult(Int32sRef input, PairOutputs<long?, BytesRef>.Pair output)
             {
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs
index 4eb7ba7..1360b04 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs
@@ -6,6 +6,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Search.Suggest.Analyzing
@@ -149,7 +150,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             // TODO: maybe just stored fields?  they compress...
             BinaryDocValues payloadsDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, "payloads");
 
-            SortedSet<Lookup.LookupResult> results = new SortedSet<Lookup.LookupResult>(LOOKUP_COMP);
+            JCG.SortedSet<Lookup.LookupResult> results = new JCG.SortedSet<Lookup.LookupResult>(LOOKUP_COMP);
 
             // we reduce the num to the one initially requested
             int actualNum = num / numFactor;
@@ -210,7 +211,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         /// <param name="results"> the tree to add in </param>
         /// <param name="result"> the result we try to add </param>
         /// <param name="num"> size limit </param>
-        private static void BoundedTreeAdd(SortedSet<Lookup.LookupResult> results, Lookup.LookupResult result, int num)
+        private static void BoundedTreeAdd(JCG.SortedSet<Lookup.LookupResult> results, Lookup.LookupResult result, int num)
         {
 
             if (results.Count >= num)
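    BlendedInfixSuggester's BoundedTreeAdd above now takes a JCG.SortedSet; the hunk
    only shows its comparer-taking constructor, so the eviction logic below is a
    sketch of the general bounded-add idea written against the BCL SortedSet<T>
    surface (Count, Min, Remove, Add, Comparer), not a quote of the ported method.

        using System.Collections.Generic;

        public static class BoundedTreeDemo
        {
            // Keep at most `num` entries; when full, admit a candidate only if it
            // beats the current minimum under the set's comparer.
            public static void BoundedAdd(SortedSet<(double Weight, string Key)> results,
                                          (double Weight, string Key) candidate, int num)
            {
                if (results.Count >= num)
                {
                    if (results.Comparer.Compare(candidate, results.Min) <= 0)
                        return;                   // not competitive: skip
                    results.Remove(results.Min);  // evict the weakest entry
                }
                results.Add(candidate);
            }
        }
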
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
index fe26f54..c7fdab5 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+using J2N.Text;
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.Shingle;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Codecs;
@@ -14,6 +15,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Search.Suggest.Analyzing
@@ -626,7 +628,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // We only add a given suffix once, from the highest
                 // order model that saw it; for subsequent lower order
                 // models we skip it:
-                var seen = new HashSet<BytesRef>();
+                var seen = new JCG.HashSet<BytesRef>();
 
                 for (int gram = grams - 1; gram >= 0; gram--)
                 {
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
index 77c511b..a216941 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
@@ -1,5 +1,5 @@
-using Lucene.Net.Analysis;
-using Lucene.Net.Support;
+using J2N;
+using Lucene.Net.Analysis;
 using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
 using Lucene.Net.Util.Fst;
@@ -240,9 +240,9 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     // to allow the trailing dedup bytes to be
                     // edited... but then 0 byte is "in general" allowed
                     // on input (but not in UTF8).
-                    LevenshteinAutomata lev = new LevenshteinAutomata(ints, unicodeAware ? Character.MAX_CODE_POINT : 255, transpositions);
+                    LevenshteinAutomata lev = new LevenshteinAutomata(ints, unicodeAware ? Character.MaxCodePoint : 255, transpositions);
                     Automaton levAutomaton = lev.ToAutomaton(maxEdits);
-                    Automaton combined = BasicOperations.Concatenate(Arrays.AsList(prefix, levAutomaton));
+                    Automaton combined = BasicOperations.Concatenate(prefix, levAutomaton);
                     combined.IsDeterministic = true; // its like the special case in concatenate itself, except we cloneExpanded already
                     subs[upto] = combined;
                     upto++;
@@ -263,7 +263,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             {
                 // multiple paths: this is really scary! is it slow?
                 // maybe we should not do this and throw UOE?
-                Automaton a = BasicOperations.Union(Arrays.AsList(subs));
+                Automaton a = BasicOperations.Union(subs);
                 // TODO: we could call toLevenshteinAutomata() before det? 
                 // this only happens if you have multiple paths anyway (e.g. synonyms)
                 BasicOperations.Determinize(a);
diff --git a/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs b/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
index 9ce6cdd..21bc6f0 100644
--- a/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
+++ b/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Search.Spell;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest
 {
@@ -112,14 +113,14 @@ namespace Lucene.Net.Search.Suggest
 
 
             private readonly int docCount;
-            private readonly HashSet<string> relevantFields;
+            private readonly ISet<string> relevantFields;
             private readonly bool hasPayloads;
             private readonly bool hasContexts;
             private readonly IBits liveDocs;
             private int currentDocId = -1;
             private long currentWeight;
             private BytesRef currentPayload;
-            private HashSet<BytesRef> currentContexts;
+            private ISet<BytesRef> currentContexts;
             private readonly NumericDocValues weightValues;
 
 
@@ -166,7 +167,7 @@ namespace Lucene.Net.Search.Suggest
 
                     BytesRef tempPayload = null;
                     BytesRef tempTerm = null;
-                    HashSet<BytesRef> tempContexts = new HashSet<BytesRef>();
+                    ISet<BytesRef> tempContexts = new JCG.HashSet<BytesRef>();
 
                     if (hasPayloads)
                     {
@@ -210,15 +211,9 @@ namespace Lucene.Net.Search.Suggest
                 return null;
             }
 
-            public virtual BytesRef Payload
-            {
-                get { return currentPayload; }
-            }
+            public virtual BytesRef Payload => currentPayload;
 
-            public virtual bool HasPayloads
-            {
-                get { return hasPayloads; }
-            }
+            public virtual bool HasPayloads => hasPayloads;
 
             /// <summary>
             /// Returns the value of the <see cref="Weight"/> property for the current document.
@@ -243,9 +238,9 @@ namespace Lucene.Net.Search.Suggest
                 }
             }
 
-            private HashSet<string> GetRelevantFields(params string[] fields)
+            private ISet<string> GetRelevantFields(params string[] fields)
             {
-                var relevantFields = new HashSet<string>();
+                var relevantFields = new JCG.HashSet<string>();
                 foreach (string relevantField in fields)
                 {
                     if (relevantField != null)
@@ -268,10 +263,7 @@ namespace Lucene.Net.Search.Suggest
                 }
             }
 
-            public virtual bool HasContexts
-            {
-                get { return hasContexts; }
-            }
+            public virtual bool HasContexts => hasContexts;
         }
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
index d90be7c..c3ffc52 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
@@ -1,5 +1,4 @@
-using Lucene.Net.Support;
-using Lucene.Net.Util;
+using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
index 07993ac..dd0b85f 100644
--- a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
@@ -25,7 +25,7 @@
 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 // THE POSSIBILITY OF SUCH DAMAGE.
 
-using Lucene.Net.Support;
... 51208 lines suppressed ...