Posted to commits@lucenenet.apache.org by ni...@apache.org on 2021/10/19 18:44:49 UTC

[lucenenet] branch master updated (3dcffb2 -> dd7ed62)

This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git.


    from 3dcffb2  Lucene.Net.Tests.Index.TestIndexWriter: Removed [AwaitsFix] attribute from TestThreadInterruptDeadlock() and TestTwoThreadsInterruptDeadlock(), since they now are passing
     new 5736c97  BREAKING: Lucene.Net.Analysis.Stempel.Egothor.Stemmer.MultiTrie: Changed protected m_tries field from List<Trie> to IList<Trie>
     new 796aa12  BREAKING: Lucene.Net.Search.BooleanQuery: Changed protected m_weights field from List<Weight> to IList<Weight>
     new 74a574f  BREAKING: Lucene.Net.Search.DisjunctionMaxQuery: Changed protected m_weights field from List<Weight> to IList<Weight>
     new 972d1f5  SWEEP: Changed all instances of System.Collections.Generic.List<T> to J2N.Collections.Generic.List<T>, which is structurally equatable and structurally formattable.
     new 2e8fb68  Lucene.Net.Util.ListExtensions: Added optimized path for J2N.Collections.Generic.List<T> in AddRange and Sort methods
     new dd7ed62  BUG: Lucene.Net.Tests.Suggest.Suggest.Analyzing.TestFreeTextSuggester::TestRandom(): LookupResult calculation in the test was using different order of parentheses than the production code. This bug existed in Java, but apparently the order makes no difference on that platform. This test was getting a false positive because it was using List<T>.ToString() to make the result comparison, which J2N's List<T> corrects.

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
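
Commits 972d1f5 and dd7ed62 above hinge on the difference in equality and ToString() semantics between the BCL list and J2N's list. Below is a minimal C# sketch of that difference, assuming J2N.Collections.Generic.List<T> follows the Java-style structural semantics the commit messages describe (the printed values are illustrative, not captured output):

    using System;
    using SCG = System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    public static class ListSemanticsDemo
    {
        public static void Main()
        {
            // BCL list: reference equality and a type-name ToString().
            var a = new SCG.List<int> { 1, 2, 3 };
            var b = new SCG.List<int> { 1, 2, 3 };
            Console.WriteLine(a.Equals(b));  // False
            Console.WriteLine(a);            // System.Collections.Generic.List`1[System.Int32]

            // J2N list: structurally equatable and structurally formattable,
            // per the commit messages for 972d1f5 and dd7ed62.
            var x = new JCG.List<int> { 1, 2, 3 };
            var y = new JCG.List<int> { 1, 2, 3 };
            Console.WriteLine(x.Equals(y));  // True (same elements, same order)
            Console.WriteLine(x);            // [1, 2, 3]
        }
    }

Because of that, a test that compares results via ToString() (as TestFreeTextSuggester.TestRandom() did) ends up comparing actual contents once the lists are J2N lists, which is how the false positive described in dd7ed62 surfaced.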


Summary of changes:
 .../Analysis/Ar/ArabicStemmer.cs                   |   7 +-
 .../CharFilter/MappingCharFilterFactory.cs         |   5 +-
 .../Analysis/Compound/Hyphenation/PatternParser.cs |   9 +-
 .../Analysis/Hunspell/Dictionary.cs                |   6 +-
 .../Analysis/Hunspell/HunspellStemFilter.cs        |   5 +-
 .../Analysis/Hunspell/HunspellStemFilterFactory.cs |   5 +-
 .../Analysis/Hunspell/Stemmer.cs                   |  10 +-
 .../Miscellaneous/CapitalizationFilterFactory.cs   |   5 +-
 .../Miscellaneous/StemmerOverrideFilter.cs         |   5 +-
 .../Miscellaneous/WordDelimiterFilterFactory.cs    |   2 +-
 .../Analysis/Path/ReversePathHierarchyTokenizer.cs |   3 +-
 .../Analysis/Pt/RSLPStemmerBase.cs                 |   3 +-
 .../Analysis/Query/QueryAutoStopWordAnalyzer.cs    |   4 +-
 .../Analysis/Sinks/TeeSinkTokenFilter.cs           |   5 +-
 .../Analysis/Synonym/FSTSynonymFilterFactory.cs    |   3 +-
 .../Analysis/Synonym/SlowSynonymFilter.cs          |   5 +-
 .../Analysis/Synonym/SlowSynonymFilterFactory.cs   |  19 ++--
 .../Analysis/Synonym/SlowSynonymMap.cs             |   7 +-
 .../Analysis/Synonym/SolrSynonymParser.cs          |   4 +-
 .../Analysis/Synonym/SynonymMap.cs                 |   2 +-
 .../Analysis/Util/AbstractAnalysisFactory.cs       |   4 +-
 .../Analysis/Util/WordlistLoader.cs                |   5 +-
 .../Analysis/Wikipedia/WikipediaTokenizer.cs       |   3 +-
 .../Dict/UserDictionary.cs                         |   8 +-
 .../Tools/BinaryDictionaryWriter.cs                |   3 +-
 .../Tools/TokenInfoDictionaryBuilder.cs            |   5 +-
 .../Tools/UnknownDictionaryBuilder.cs              |   3 +-
 src/Lucene.Net.Analysis.Kuromoji/Util/CSVUtil.cs   |   4 +-
 .../Morfologik/MorfologikFilter.cs                 |   4 +-
 .../MorphosyntacticTagsAttribute.cs                |   5 +-
 .../OpenNLPChunkerFilter.cs                        |   7 +-
 .../OpenNLPLemmatizerFilter.cs                     |   7 +-
 .../OpenNLPPOSFilter.cs                            |   5 +-
 .../Language/Bm/Lang.cs                            |   4 +-
 .../Language/Bm/Rule.cs                            |   6 +-
 .../Language/DaitchMokotoffSoundex.cs              |   7 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs |   2 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs   |   7 +-
 .../Egothor.Stemmer/Compile.cs                     |   3 +-
 .../Egothor.Stemmer/DiffIt.cs                      |   5 +-
 .../Egothor.Stemmer/Gener.cs                       |   3 +-
 .../Egothor.Stemmer/Lift.cs                        |   3 +-
 .../Egothor.Stemmer/MultiTrie.cs                   |   5 +-
 .../Egothor.Stemmer/MultiTrie2.cs                  |   3 +-
 .../Egothor.Stemmer/Optimizer.cs                   |   5 +-
 .../Egothor.Stemmer/Reduce.cs                      |   5 +-
 .../Egothor.Stemmer/Trie.cs                        |   5 +-
 .../ByTask/Feeds/EnwikiQueryMaker.cs               |   5 +-
 .../ByTask/Feeds/FileBasedQueryMaker.cs            |   4 +-
 .../ByTask/Feeds/ReutersContentSource.cs           |   3 +-
 .../ByTask/Feeds/ReutersQueryMaker.cs              |   5 +-
 .../ByTask/Feeds/SimpleQueryMaker.cs               |   4 +-
 .../ByTask/Feeds/SimpleSloppyPhraseQueryMaker.cs   |   6 +-
 .../ByTask/Feeds/SpatialFileQueryMaker.cs          |   3 +-
 .../ByTask/Feeds/TrecContentSource.cs              |   3 +-
 src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs     |   3 +-
 src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs    |   3 +-
 .../ByTask/Tasks/AddFacetedDocTask.cs              |   3 +-
 .../ByTask/Tasks/AnalyzerFactoryTask.cs            |   6 +-
 .../ByTask/Tasks/NewAnalyzerTask.cs                |   3 +-
 .../ByTask/Tasks/TaskSequence.cs                   |  10 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs |   2 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs    |  13 +--
 src/Lucene.Net.Benchmark/Quality/QualityStats.cs   |   4 +-
 .../Quality/Trec/Trec1MQReader.cs                  |   3 +-
 src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs |   2 +-
 .../Quality/Trec/TrecTopicsReader.cs               |   3 +-
 .../Quality/Utils/DocNameExtractor.cs              |   3 +-
 .../Support/Sax/Helpers/NamespaceSupport.cs        |   3 +-
 src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs |   4 +-
 .../BlockTerms/BlockTermsWriter.cs                 |   3 +-
 .../BlockTerms/FixedGapTermsIndexWriter.cs         |   4 +-
 .../BlockTerms/VariableGapTermsIndexWriter.cs      |   3 +-
 .../Bloom/BloomFilteringPostingsFormat.cs          |   2 +-
 src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs  |   5 +-
 src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs     |   5 +-
 .../Pulsing/PulsingPostingsWriter.cs               |   6 +-
 src/Lucene.Net.Facet/DrillDownQuery.cs             |   4 +-
 src/Lucene.Net.Facet/FacetsCollector.cs            |   3 +-
 src/Lucene.Net.Facet/FacetsConfig.cs               |   8 +-
 src/Lucene.Net.Facet/MultiFacets.cs                |   3 +-
 .../RandomSamplingFacetsCollector.cs               |   4 +-
 src/Lucene.Net.Facet/Range/LongRangeCounter.cs     |   7 +-
 src/Lucene.Net.Facet/Range/RangeFacetCounts.cs     |   3 +-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs     |   3 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs    |   3 +-
 .../Taxonomy/WriterCache/CharBlockArray.cs         |   5 +-
 .../AbstractFirstPassGroupingCollector.cs          |   2 +-
 .../AbstractGroupFacetCollector.cs                 |   4 +-
 .../Function/FunctionDistinctValuesCollector.cs    |   2 +-
 src/Lucene.Net.Grouping/GroupingSearch.cs          |   5 +-
 src/Lucene.Net.Grouping/SearchGroup.cs             |   4 +-
 .../Term/TermAllGroupHeadsCollector.cs             |   6 +-
 .../Term/TermAllGroupsCollector.cs                 |   3 +-
 .../Term/TermDistinctValuesCollector.cs            |   5 +-
 .../Term/TermGroupFacetCollector.cs                |   6 +-
 .../Highlight/Highlighter.cs                       |   7 +-
 .../Highlight/TokenSources.cs                      |   5 +-
 .../Highlight/TokenStreamFromTermPositionVector.cs |   3 +-
 .../Highlight/WeightedSpanTerm.cs                  |   3 +-
 .../Highlight/WeightedSpanTermExtractor.cs         |  10 +-
 .../PostingsHighlight/MultiTermHighlighting.cs     |   3 +-
 .../PostingsHighlight/PostingsHighlighter.cs       |   2 +-
 .../VectorHighlight/BaseFragListBuilder.cs         |   4 +-
 .../VectorHighlight/BaseFragmentsBuilder.cs        |  12 +--
 .../VectorHighlight/FieldFragList.cs               |   3 +-
 .../VectorHighlight/FieldPhraseList.cs             |  21 ++--
 .../VectorHighlight/FieldTermStack.cs              |   4 +-
 .../VectorHighlight/SimpleFieldFragList.cs         |   3 +-
 .../VectorHighlight/SingleFragListBuilder.cs       |   3 +-
 .../VectorHighlight/WeightedFieldFragList.cs       |   4 +-
 .../Support/ToChildBlockJoinQuery.cs               |   3 +-
 .../Support/ToParentBlockJoinQuery.cs              |   3 +-
 src/Lucene.Net.Join/ToChildBlockJoinQuery.cs       |   4 +-
 src/Lucene.Net.Join/ToParentBlockJoinQuery.cs      |   3 +-
 src/Lucene.Net.Misc/Document/LazyDocument.cs       |   2 +-
 src/Lucene.Net.Misc/Index/IndexSplitter.cs         |   5 +-
 .../Index/MultiPassIndexSplitter.cs                |   4 +-
 .../Index/Sorter/SortingMergePolicy.cs             |   3 +-
 src/Lucene.Net.Queries/CommonTermsQuery.cs         |   2 +-
 src/Lucene.Net.Queries/CustomScoreQuery.cs         |   3 +-
 src/Lucene.Net.Queries/Function/BoostedQuery.cs    |   3 +-
 src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs         |   5 +-
 src/Lucene.Net.Queries/TermsFilter.cs              |   3 +-
 .../Classic/MultiFieldQueryParser.cs               |  15 +--
 src/Lucene.Net.QueryParser/Classic/QueryParser.cs  |   5 +-
 .../ComplexPhrase/ComplexPhraseQueryParser.cs      |  15 +--
 .../Flexible/Core/Nodes/GroupQueryNode.cs          |   3 +-
 .../Flexible/Core/Nodes/ModifierQueryNode.cs       |   3 +-
 .../Flexible/Core/Nodes/PathQueryNode.cs           |   5 +-
 .../Core/Processors/QueryNodeProcessorImpl.cs      |   5 +-
 .../Flexible/Core/Util/QueryNodeOperation.cs       |   4 +-
 .../BooleanModifiersQueryNodeProcessor.cs          |   3 +-
 .../Builders/MultiPhraseQueryNodeBuilder.cs        |   8 +-
 .../Standard/Nodes/AbstractRangeQueryNode.cs       |   3 +-
 .../Standard/Parser/StandardSyntaxParser.cs        |  15 +--
 .../Processors/AnalyzerQueryNodeProcessor.cs       |   7 +-
 .../BooleanQuery2ModifierNodeProcessor.cs          |   3 +-
 .../Standard/Processors/GroupQueryNodeProcessor.cs |   7 +-
 .../Processors/MultiFieldQueryNodeProcessor.cs     |   3 +-
 .../RemoveEmptyNonLeafQueryNodeProcessor.cs        |   3 +-
 .../Surround/Parser/QueryParser.cs                 |  17 ++--
 .../Surround/Query/ComposedQuery.cs                |   2 +-
 .../Surround/Query/FieldsQuery.cs                  |   5 +-
 .../Surround/Query/SimpleTermRewriteQuery.cs       |   3 +-
 .../Surround/Query/SpanNearClauseFactory.cs        |   2 +-
 .../Xml/Builders/SpanNearBuilder.cs                |   3 +-
 .../Xml/Builders/SpanOrBuilder.cs                  |   4 +-
 .../Xml/Builders/SpanOrTermsBuilder.cs             |   5 +-
 .../Xml/Builders/TermsFilterBuilder.cs             |   4 +-
 src/Lucene.Net.Replicator/IndexRevision.cs         |   6 +-
 src/Lucene.Net.Replicator/ReplicationClient.cs     |   4 +-
 src/Lucene.Net.Replicator/SessionToken.cs          |   5 +-
 .../Queries/FuzzyLikeThisQuery.cs                  |  10 +-
 src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs         |   6 +-
 .../Prefix/Tree/GeohashPrefixTree.cs               |   3 +-
 .../Prefix/Tree/QuadPrefixTree.cs                  |   7 +-
 .../Prefix/Tree/SpatialPrefixTree.cs               |   8 +-
 src/Lucene.Net.Spatial/Query/SpatialOperation.cs   |   3 +-
 src/Lucene.Net.Spatial/Util/ShapeFieldCache.cs     |   3 +-
 src/Lucene.Net.Suggest/Spell/SpellChecker.cs       |   3 +-
 .../Suggest/Analyzing/AnalyzingInfixSuggester.cs   |   2 +-
 .../Suggest/Analyzing/AnalyzingSuggester.cs        |   4 +-
 .../Suggest/Analyzing/BlendedInfixSuggester.cs     |   2 +-
 .../Suggest/Analyzing/FSTUtil.cs                   |   6 +-
 .../Suggest/Analyzing/FreeTextSuggester.cs         |   2 +-
 .../Suggest/BufferedInputIterator.cs               |   3 +-
 .../Suggest/Fst/FSTCompletion.cs                   |   2 +-
 .../Suggest/Fst/FSTCompletionLookup.cs             |   3 +-
 .../Suggest/Fst/WFSTCompletionLookup.cs            |   5 +-
 .../Suggest/Jaspell/JaspellLookup.cs               |   3 +-
 .../Suggest/Jaspell/JaspellTernarySearchTrie.cs    |   7 +-
 .../Suggest/Tst/TSTAutocomplete.cs                 |   3 +-
 src/Lucene.Net.Suggest/Suggest/Tst/TSTLookup.cs    |   7 +-
 .../Analysis/BaseTokenStreamTestCase.cs            |  13 +--
 .../Analysis/LookaheadTokenFilter.cs               |   5 +-
 .../Codecs/MockRandom/MockRandomPostingsFormat.cs  |   3 +-
 .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs        |   2 +-
 .../BaseCompressingDocValuesFormatTestCase.cs      |   3 +-
 .../Index/BaseDocValuesFormatTestCase.cs           |  12 +--
 .../Index/BasePostingsFormatTestCase.cs            |   8 +-
 .../Index/BaseStoredFieldsFormatTestCase.cs        |   4 +-
 .../Index/BaseTermVectorsFormatTestCase.cs         |   2 +-
 .../Index/ThreadedIndexingAndSearchingTestCase.cs  |   9 +-
 .../Search/AssertingScorer.cs                      |   5 +-
 .../Search/RandomSimilarityProvider.cs             |   5 +-
 .../Search/SearchEquivalenceTestBase.cs            |   2 +-
 .../Store/BaseDirectoryTestCase.cs                 |   4 +-
 .../Store/MockDirectoryWrapper.cs                  |   8 +-
 .../JavaCompatibility/SystemTypesHelpers.cs        |   3 +-
 .../Support/SynchronizedList.cs                    |   5 +-
 .../Util/Automaton/AutomatonTestUtil.cs            |   6 +-
 src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs |   2 +-
 .../Util/LuceneTestCase.cs                         |   6 +-
 .../Util/RunListenerPrintReproduceInfo.cs          |   4 +-
 .../Util/TestRuleMarkFailure.cs                    |   4 +-
 .../Analysis/CharFilters/TestMappingCharFilter.cs  |   6 +-
 .../Analysis/Core/TestRandomChains.cs              |  14 +--
 .../Analysis/Core/TestStopFilter.cs                |  10 +-
 .../Analysis/Core/TestUAX29URLEmailTokenizer.cs    |   9 +-
 .../Miscellaneous/TestASCIIFoldingFilter.cs        |   5 +-
 .../Miscellaneous/TestCapitalizationFilter.cs      |   5 +-
 .../Miscellaneous/TestStemmerOverrideFilter.cs     |   5 +-
 .../Analysis/Pattern/TestPatternTokenizer.cs       |   5 +-
 .../Analysis/Synonym/TestMultiWordSynonyms.cs      |   5 +-
 .../Analysis/Synonym/TestSlowSynonymFilter.cs      |   5 +-
 .../Analysis/Synonym/TestSynonymMap.cs             |  11 +-
 .../Analysis/Synonym/TestSynonymMapFilter.cs       |   4 +-
 .../Analysis/Th/TestThaiAnalyzer.cs                |   9 +-
 .../Analysis/Util/TestCharArraySet.cs              |  72 ++++++-------
 .../Analysis/Util/TestElision.cs                   |   5 +-
 .../Tools/TestBuildDictionary.cs                   |   4 +-
 .../Language/Bm/PhoneticEngineTest.cs              |   4 +-
 .../AssertingSubDocsAtOnceCollector.cs             |   3 +-
 src/Lucene.Net.Tests.Facet/FacetTestCase.cs        |   7 +-
 .../SortedSet/TestSortedSetDocValuesFacets.cs      |   7 +-
 .../Taxonomy/TestSearcherTaxonomyManager.cs        |   2 +-
 .../Taxonomy/TestTaxonomyCombined.cs               |   5 +-
 .../Taxonomy/TestTaxonomyFacetCounts.cs            |   6 +-
 .../Taxonomy/TestTaxonomyFacetCounts2.cs           |   2 +-
 .../Taxonomy/TestTaxonomyFacetSumValueSource.cs    |   7 +-
 src/Lucene.Net.Tests.Facet/TestDrillSideways.cs    |   4 +-
 .../AllGroupHeadsCollectorTest.cs                  |  12 +--
 .../DistinctValuesCollectorTest.cs                 |  46 ++++-----
 .../GroupFacetCollectorTest.cs                     |   6 +-
 .../GroupingSearchTest.cs                          |   5 +-
 src/Lucene.Net.Tests.Grouping/TestGrouping.cs      |  44 ++++----
 .../Highlight/HighlighterTest.cs                   |  12 +--
 .../VectorHighlight/AbstractTestCase.cs            |   7 +-
 .../VectorHighlight/FieldPhraseListTest.cs         |   3 +-
 .../VectorHighlight/FieldQueryTest.cs              |   6 +-
 .../VectorHighlight/SimpleFragmentsBuilderTest.cs  |   6 +-
 src/Lucene.Net.Tests.Join/TestBlockJoin.cs         |  31 +++---
 src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs  |   3 +-
 .../TestBlockJoinValidation.cs                     |   7 +-
 src/Lucene.Net.Tests.Join/TestJoinUtil.cs          |  12 +--
 .../Index/Sorter/IndexSortingTest.cs               |   3 +-
 .../Index/Sorter/SorterTestBase.cs                 |   3 +-
 .../Index/Sorter/TestBlockJoinSorter.cs            |   5 +-
 .../Index/Sorter/TestEarlyTermination.cs           |   4 +-
 .../Index/Sorter/TestSortingMergePolicy.cs         |   4 +-
 .../CommonTermsQueryTest.cs                        |   8 +-
 src/Lucene.Net.Tests.Queries/TermFilterTest.cs     |   2 +-
 src/Lucene.Net.Tests.Queries/TermsFilterTest.cs    |  10 +-
 .../DistanceStrategyTest.cs                        |   3 +-
 src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs    |   2 +-
 .../Prefix/SpatialOpRecursivePrefixTreeTest.cs     |   8 +-
 .../QueryEqualsHashCodeTest.cs                     |   3 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs    |   8 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestData.cs    |   3 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestQuery.cs   |   5 +-
 src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs   |  14 +--
 src/Lucene.Net.Tests.Spatial/TestTestFramework.cs  |   4 +-
 .../Spell/TestWordBreakSpellChecker.cs             |   5 +-
 .../Analyzing/AnalyzingInfixSuggesterTest.cs       |  10 +-
 .../Suggest/Analyzing/AnalyzingSuggesterTest.cs    |  10 +-
 .../Suggest/Analyzing/FuzzySuggesterTest.cs        |  18 ++--
 .../Suggest/Analyzing/TestFreeTextSuggester.cs     |  12 ++-
 .../Suggest/DocumentDictionaryTest.cs              |  24 ++---
 .../Suggest/DocumentValueSourceDictionaryTest.cs   |   2 +-
 .../Suggest/FileDictionaryTest.cs                  |  48 ++++-----
 .../Suggest/Fst/FSTCompletionTest.cs               |   5 +-
 .../Suggest/Fst/WFSTCompletionTest.cs              |   2 +-
 .../Suggest/LookupBenchmarkTest.cs                 |   4 +-
 .../Analysis/TrivialLookaheadFilter.cs             | 112 +++++++++++----------
 .../Analysis/TestGraphTokenizers.cs                |   5 +-
 .../Analysis/TrivialLookaheadFilter.cs             |   5 +-
 .../Codecs/Lucene3x/TestSurrogates.cs              |   2 +-
 .../Codecs/Lucene3x/TestTermInfosReaderIndex.cs    |   5 +-
 .../Codecs/Lucene40/TestLucene40PostingsReader.cs  |   3 +-
 .../Codecs/Lucene41/TestBlockPostingsFormat3.cs    |   2 +-
 src/Lucene.Net.Tests/Index/Test2BTerms.cs          |   4 +-
 src/Lucene.Net.Tests/Index/TestAddIndexes.cs       |   5 +-
 .../Index/TestBackwardsCompatibility.cs            |   7 +-
 .../Index/TestBackwardsCompatibility3x.cs          |   5 +-
 src/Lucene.Net.Tests/Index/TestBagOfPositions.cs   |   3 +-
 src/Lucene.Net.Tests/Index/TestBagOfPostings.cs    |   2 +-
 src/Lucene.Net.Tests/Index/TestCheckIndex.cs       |   5 +-
 src/Lucene.Net.Tests/Index/TestDoc.cs              |   4 +-
 src/Lucene.Net.Tests/Index/TestDocTermOrds.cs      |   4 +-
 .../Index/TestDocValuesWithThreads.cs              |  10 +-
 src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs |   6 +-
 .../Index/TestFlushByRamOrCountsPolicy.cs          |  11 +-
 src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs |   3 +-
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs      |  10 +-
 .../Index/TestIndexWriterDelete.cs                 |   3 +-
 .../Index/TestIndexWriterExceptions.cs             |   8 +-
 .../Index/TestIndexWriterMerging.cs                |   7 +-
 .../Index/TestIndexWriterOnJRECrash.cs             |   2 +-
 .../Index/TestIndexWriterReader.cs                 |   3 +-
 src/Lucene.Net.Tests/Index/TestIntBlockPool.cs     |   3 +-
 src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs |   7 +-
 src/Lucene.Net.Tests/Index/TestMultiDocValues.cs   |   5 +-
 src/Lucene.Net.Tests/Index/TestMultiFields.cs      |   8 +-
 src/Lucene.Net.Tests/Index/TestPayloads.cs         |   3 +-
 .../Index/TestPerSegmentDeletes.cs                 |   7 +-
 src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs  |   5 +-
 src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs |   4 +-
 src/Lucene.Net.Tests/Index/TestSegmentMerger.cs    |   6 +-
 .../Index/TestSnapshotDeletionPolicy.cs            |   3 +-
 src/Lucene.Net.Tests/Index/TestStressAdvance.cs    |   6 +-
 src/Lucene.Net.Tests/Index/TestStressIndexing2.cs  |   5 +-
 src/Lucene.Net.Tests/Index/TestStressNRT.cs        |   3 +-
 src/Lucene.Net.Tests/Index/TestTermsEnum.cs        |   6 +-
 src/Lucene.Net.Tests/Index/TestTermsEnum2.cs       |   4 +-
 src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs  |   2 +-
 .../Search/Similarities/TestSimilarity2.cs         |   5 +-
 .../Search/Similarities/TestSimilarityBase.cs      |   5 +-
 src/Lucene.Net.Tests/Search/Spans/TestBasics.cs    |  11 +-
 src/Lucene.Net.Tests/Search/TestBooleanQuery.cs    |   5 +-
 src/Lucene.Net.Tests/Search/TestBooleanScorer.cs   |   5 +-
 .../Search/TestControlledRealTimeReopenThread.cs   |   3 +-
 src/Lucene.Net.Tests/Search/TestDocIdSet.cs        |   5 +-
 .../Search/TestDocTermOrdsRangeFilter.cs           |   3 +-
 .../Search/TestDocTermOrdsRewriteMethod.cs         |   3 +-
 .../Search/TestFieldCacheTermsFilter.cs            |  10 +-
 src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs      |   5 +-
 src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs |   3 +-
 src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs |   4 +-
 src/Lucene.Net.Tests/Search/TestPhraseQuery.cs     |   5 +-
 src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs   |   4 +-
 .../Search/TestSameScoresWithThreads.cs            |   3 +-
 src/Lucene.Net.Tests/Search/TestSearchAfter.cs     |   5 +-
 src/Lucene.Net.Tests/Search/TestSearcherManager.cs |   3 +-
 src/Lucene.Net.Tests/Search/TestShardSearching.cs  |   8 +-
 src/Lucene.Net.Tests/Search/TestSort.cs            |  13 +--
 src/Lucene.Net.Tests/Search/TestSortRandom.cs      |   2 +-
 src/Lucene.Net.Tests/Search/TestTermScorer.cs      |   5 +-
 src/Lucene.Net.Tests/Search/TestTopDocsMerge.cs    |   5 +-
 .../Store/TestBufferedIndexInput.cs                |   3 +-
 src/Lucene.Net.Tests/Store/TestDirectory.cs        |   3 +-
 .../Store/TestNRTCachingDirectory.cs               |   3 +-
 src/Lucene.Net.Tests/TestSearch.cs                 |   5 +-
 .../Util/Automaton/TestBasicOperations.cs          |   3 +-
 .../Util/Automaton/TestCompiledAutomaton.cs        |   4 +-
 .../Util/Automaton/TestDeterminizeLexicon.cs       |   7 +-
 .../Util/Automaton/TestLevenshteinAutomata.cs      |  11 +-
 src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs          |  32 +++---
 src/Lucene.Net.Tests/Util/Packed/TestPackedInts.cs |   4 +-
 src/Lucene.Net.Tests/Util/TestByteBlockPool.cs     |   3 +-
 src/Lucene.Net.Tests/Util/TestBytesRefArray.cs     |   9 +-
 src/Lucene.Net.Tests/Util/TestCollectionUtil.cs    |   8 +-
 src/Lucene.Net.Tests/Util/TestMergedIterator.cs    |  19 ++--
 src/Lucene.Net.Tests/Util/TestOfflineSorter.cs     |   3 +-
 .../Util/TestRecyclingByteBlockAllocator.cs        |   4 +-
 .../Util/TestRecyclingIntBlockAllocator.cs         |   4 +-
 src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs      |   9 +-
 src/Lucene.Net/Analysis/CachingTokenFilter.cs      |   5 +-
 src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs      |   6 +-
 .../Codecs/Lucene3x/TermInfosReaderIndex.cs        |   3 +-
 src/Lucene.Net/Document/Document.cs                |  17 ++--
 src/Lucene.Net/Index/BufferedUpdates.cs            |   5 +-
 src/Lucene.Net/Index/BufferedUpdatesStream.cs      |   6 +-
 src/Lucene.Net/Index/CheckIndex.cs                 |  13 ++-
 src/Lucene.Net/Index/CoalescedUpdates.cs           |  10 +-
 src/Lucene.Net/Index/CompositeReaderContext.cs     |   7 +-
 src/Lucene.Net/Index/ConcurrentMergeScheduler.cs   |   7 +-
 src/Lucene.Net/Index/DirectoryReader.cs            |   3 +-
 src/Lucene.Net/Index/DocTermOrds.cs                |   4 +-
 .../Index/DocumentsWriterFlushControl.cs           |   2 +-
 src/Lucene.Net/Index/FreqProxTermsWriter.cs        |   5 +-
 src/Lucene.Net/Index/FrozenBufferedUpdates.cs      |   7 +-
 src/Lucene.Net/Index/IndexFileDeleter.cs           |   9 +-
 src/Lucene.Net/Index/IndexWriter.cs                |  14 +--
 src/Lucene.Net/Index/LogMergePolicy.cs             |   5 +-
 src/Lucene.Net/Index/MergePolicy.cs                |   7 +-
 src/Lucene.Net/Index/MultiFields.cs                |  10 +-
 src/Lucene.Net/Index/MultiTerms.cs                 |   8 +-
 .../Index/PersistentSnapshotDeletionPolicy.cs      |   5 +-
 src/Lucene.Net/Index/SegmentInfos.cs               |   2 +-
 src/Lucene.Net/Index/SegmentMerger.cs              |  21 ++--
 src/Lucene.Net/Index/SegmentReader.cs              |   6 +-
 src/Lucene.Net/Index/SnapshotDeletionPolicy.cs     |   5 +-
 src/Lucene.Net/Index/StandardDirectoryReader.cs    |   3 +-
 src/Lucene.Net/Index/TieredMergePolicy.cs          |   6 +-
 src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs    |   5 +-
 src/Lucene.Net/Search/BooleanQuery.cs              |  22 ++--
 src/Lucene.Net/Search/BooleanScorer2.cs            |   5 +-
 src/Lucene.Net/Search/CachingCollector.cs          |  11 +-
 src/Lucene.Net/Search/CachingWrapperFilter.cs      |   7 +-
 src/Lucene.Net/Search/ConjunctionScorer.cs         |   5 +-
 src/Lucene.Net/Search/DisjunctionMaxQuery.cs       |   6 +-
 src/Lucene.Net/Search/DisjunctionScorer.cs         |   5 +-
 src/Lucene.Net/Search/Explanation.cs               |   8 +-
 src/Lucene.Net/Search/FieldCacheImpl.cs            |   3 +-
 src/Lucene.Net/Search/MinShouldMatchSumScorer.cs   |   3 +-
 src/Lucene.Net/Search/MultiPhraseQuery.cs          |   2 +-
 src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs  |   8 +-
 src/Lucene.Net/Search/ReqOptSumScorer.cs           |   5 +-
 src/Lucene.Net/Search/SearcherLifetimeManager.cs   |   3 +-
 src/Lucene.Net/Search/SloppyPhraseScorer.cs        |  12 +--
 src/Lucene.Net/Search/Spans/NearSpansOrdered.cs    |   8 +-
 src/Lucene.Net/Search/Spans/NearSpansUnordered.cs  |   6 +-
 src/Lucene.Net/Search/Spans/SpanNearQuery.cs       |   2 +-
 src/Lucene.Net/Search/Spans/SpanNotQuery.cs        |   5 +-
 src/Lucene.Net/Search/Spans/SpanOrQuery.cs         |   4 +-
 .../Search/Spans/SpanPositionCheckQuery.cs         |   5 +-
 src/Lucene.Net/Search/WildcardQuery.cs             |   3 +-
 src/Lucene.Net/Store/FileSwitchDirectory.cs        |   4 +-
 src/Lucene.Net/Store/RAMFile.cs                    |   5 +-
 src/Lucene.Net/Support/Util/ListExtensions.cs      |  15 +++
 src/Lucene.Net/Util/Automaton/BasicAutomata.cs     |   7 +-
 src/Lucene.Net/Util/Automaton/BasicOperations.cs   |   8 +-
 src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs |   3 +-
 .../Util/Automaton/MinimizationOperations.cs       |  16 +--
 src/Lucene.Net/Util/Automaton/RegExp.cs            |   4 +-
 src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs       |   5 +-
 src/Lucene.Net/Util/CloseableThreadLocal.cs        |   3 +-
 src/Lucene.Net/Util/FieldCacheSanityChecker.cs     |  10 +-
 src/Lucene.Net/Util/OfflineSorter.cs               |   3 +-
 src/Lucene.Net/Util/PagedBytes.cs                  |   5 +-
 src/Lucene.Net/Util/QueryBuilder.cs                |   3 +-
 src/Lucene.Net/Util/RamUsageEstimator.cs           |   2 +-
 413 files changed, 1422 insertions(+), 1182 deletions(-)
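
The 15 lines added to src/Lucene.Net/Support/Util/ListExtensions.cs correspond to commit 2e8fb68 above (optimized AddRange and Sort paths for J2N.Collections.Generic.List<T>). The actual implementation is not shown in this email; the following is only a rough sketch of what such an optimized path typically looks like, and the type checks and fallback here are assumptions rather than the real code:

    using System.Collections.Generic;
    using SCG = System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class ListExtensionsSketch
    {
        // Hypothetical AddRange over IList<T>: delegate to the concrete list's own
        // bulk AddRange when the backing type supports it, otherwise add item by item.
        public static void AddRange<T>(this IList<T> list, IEnumerable<T> collection)
        {
            if (list is JCG.List<T> jcgList)
            {
                jcgList.AddRange(collection);   // optimized path for the J2N list
            }
            else if (list is SCG.List<T> scgList)
            {
                scgList.AddRange(collection);   // BCL list also has a bulk AddRange
            }
            else
            {
                foreach (T item in collection)  // generic fallback for any IList<T>
                    list.Add(item);
            }
        }
    }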

[lucenenet] 03/06: BREAKING: Lucene.Net.Search.DisjunctionMaxQuery: Changed protected m_weights field from List<Weight> to IList<Weight>

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit 74a574f8f8606960dbdcddcd2ec2582c7de82787
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Tue Oct 19 05:30:26 2021 +0700

    BREAKING: Lucene.Net.Search.DisjunctionMaxQuery: Changed protected m_weights field from List<Weight> to IList<Weight>
---
 src/Lucene.Net/Search/DisjunctionMaxQuery.cs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/Lucene.Net/Search/DisjunctionMaxQuery.cs b/src/Lucene.Net/Search/DisjunctionMaxQuery.cs
index be1c56a..e8fa330 100644
--- a/src/Lucene.Net/Search/DisjunctionMaxQuery.cs
+++ b/src/Lucene.Net/Search/DisjunctionMaxQuery.cs
@@ -1,4 +1,4 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Util;
 using System;
 using System.Collections;
@@ -137,7 +137,7 @@ namespace Lucene.Net.Search
 
             /// <summary>
             /// The <see cref="Weight"/>s for our subqueries, in 1-1 correspondence with disjuncts </summary>
-            protected List<Weight> m_weights = new List<Weight>(); // The Weight's for our subqueries, in 1-1 correspondence with disjuncts
+            protected IList<Weight> m_weights = new List<Weight>(); // The Weight's for our subqueries, in 1-1 correspondence with disjuncts
 
             /// <summary>
             /// Construct the <see cref="Weight"/> for this <see cref="Search.Query"/> searched by <paramref name="searcher"/>.  Recursively construct subquery weights. </summary>
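
For third-party code that subclasses these weight classes, retyping the protected field is a source-level break: members that exist only on List<T> no longer compile against IList<T>. A self-contained toy illustration of the pattern (BaseBefore/BaseAfter/DerivedAfter are made-up types, not Lucene.NET classes):

    using SCG = System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    public class BaseBefore { protected SCG.List<int> m_items = new SCG.List<int>(); }
    public class BaseAfter  { protected SCG.IList<int> m_items = new JCG.List<int>(); }

    public class DerivedAfter : BaseAfter
    {
        public void Example()
        {
            // m_items.TrimExcess();          // List<T>-only member: no longer compiles against IList<T>
            m_items.Add(42);                  // IList<T> members are unaffected
            if (m_items is JCG.List<int> concrete)
                concrete.Sort();              // concrete-type members (Sort is assumed to mirror the BCL list) stay reachable via a type test
        }
    }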

[lucenenet] 02/06: BREAKING: Lucene.Net.Search.BooleanQuery: Changed protected m_weights field from List<Weight> to IList<Weight>

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit 796aa12e0b94930f79fc28b6d67f522be2bc50d7
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Tue Oct 19 05:26:11 2021 +0700

    BREAKING: Lucene.Net.Search.BooleanQuery: Changed protected m_weights field from List<Weight> to IList<Weight>
---
 src/Lucene.Net/Search/BooleanQuery.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/Lucene.Net/Search/BooleanQuery.cs b/src/Lucene.Net/Search/BooleanQuery.cs
index 29cc92a..93a62db 100644
--- a/src/Lucene.Net/Search/BooleanQuery.cs
+++ b/src/Lucene.Net/Search/BooleanQuery.cs
@@ -238,7 +238,7 @@ namespace Lucene.Net.Search
             /// The <see cref="Similarities.Similarity"/> implementation. </summary>
             protected Similarity m_similarity;
 
-            protected List<Weight> m_weights;
+            protected IList<Weight> m_weights;
             protected int m_maxCoord; // num optional + num required
             private readonly bool disableCoord;
 

[lucenenet] 04/06: SWEEP: Changed all instances of System.Collections.Generic.List<T> to J2N.Collections.Generic.List<T>, which is structurally equatable and structurally formattable.

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit 972d1f50dcb7b49cd08ae91cd984309f942909b8
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Tue Oct 19 05:40:44 2021 +0700

    SWEEP: Changed all instances of System.Collections.Generic.List<T> to J2N.Collections.Generic.List<T>, which is structurally equatable and structurally formattable.
---
 .../Analysis/Ar/ArabicStemmer.cs                   |   7 +-
 .../CharFilter/MappingCharFilterFactory.cs         |   5 +-
 .../Analysis/Compound/Hyphenation/PatternParser.cs |   9 +-
 .../Analysis/Hunspell/Dictionary.cs                |   6 +-
 .../Analysis/Hunspell/HunspellStemFilter.cs        |   5 +-
 .../Analysis/Hunspell/HunspellStemFilterFactory.cs |   5 +-
 .../Analysis/Hunspell/Stemmer.cs                   |  10 +-
 .../Miscellaneous/CapitalizationFilterFactory.cs   |   5 +-
 .../Miscellaneous/StemmerOverrideFilter.cs         |   5 +-
 .../Miscellaneous/WordDelimiterFilterFactory.cs    |   2 +-
 .../Analysis/Path/ReversePathHierarchyTokenizer.cs |   3 +-
 .../Analysis/Pt/RSLPStemmerBase.cs                 |   3 +-
 .../Analysis/Query/QueryAutoStopWordAnalyzer.cs    |   4 +-
 .../Analysis/Sinks/TeeSinkTokenFilter.cs           |   5 +-
 .../Analysis/Synonym/FSTSynonymFilterFactory.cs    |   3 +-
 .../Analysis/Synonym/SlowSynonymFilter.cs          |   5 +-
 .../Analysis/Synonym/SlowSynonymFilterFactory.cs   |  19 ++--
 .../Analysis/Synonym/SlowSynonymMap.cs             |   7 +-
 .../Analysis/Synonym/SolrSynonymParser.cs          |   4 +-
 .../Analysis/Synonym/SynonymMap.cs                 |   2 +-
 .../Analysis/Util/AbstractAnalysisFactory.cs       |   4 +-
 .../Analysis/Util/WordlistLoader.cs                |   5 +-
 .../Analysis/Wikipedia/WikipediaTokenizer.cs       |   3 +-
 .../Dict/UserDictionary.cs                         |   8 +-
 .../Tools/BinaryDictionaryWriter.cs                |   3 +-
 .../Tools/TokenInfoDictionaryBuilder.cs            |   5 +-
 .../Tools/UnknownDictionaryBuilder.cs              |   3 +-
 src/Lucene.Net.Analysis.Kuromoji/Util/CSVUtil.cs   |   4 +-
 .../Morfologik/MorfologikFilter.cs                 |   4 +-
 .../MorphosyntacticTagsAttribute.cs                |   5 +-
 .../OpenNLPChunkerFilter.cs                        |   7 +-
 .../OpenNLPLemmatizerFilter.cs                     |   7 +-
 .../OpenNLPPOSFilter.cs                            |   5 +-
 .../Language/Bm/Lang.cs                            |   4 +-
 .../Language/Bm/Rule.cs                            |   6 +-
 .../Language/DaitchMokotoffSoundex.cs              |   7 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs |   2 +-
 src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs   |   7 +-
 .../Egothor.Stemmer/Compile.cs                     |   3 +-
 .../Egothor.Stemmer/DiffIt.cs                      |   5 +-
 .../Egothor.Stemmer/Gener.cs                       |   3 +-
 .../Egothor.Stemmer/Lift.cs                        |   3 +-
 .../Egothor.Stemmer/MultiTrie.cs                   |   5 +-
 .../Egothor.Stemmer/MultiTrie2.cs                  |   3 +-
 .../Egothor.Stemmer/Optimizer.cs                   |   5 +-
 .../Egothor.Stemmer/Reduce.cs                      |   5 +-
 .../Egothor.Stemmer/Trie.cs                        |   5 +-
 .../ByTask/Feeds/EnwikiQueryMaker.cs               |   5 +-
 .../ByTask/Feeds/FileBasedQueryMaker.cs            |   4 +-
 .../ByTask/Feeds/ReutersContentSource.cs           |   3 +-
 .../ByTask/Feeds/ReutersQueryMaker.cs              |   5 +-
 .../ByTask/Feeds/SimpleQueryMaker.cs               |   4 +-
 .../ByTask/Feeds/SimpleSloppyPhraseQueryMaker.cs   |   6 +-
 .../ByTask/Feeds/SpatialFileQueryMaker.cs          |   3 +-
 .../ByTask/Feeds/TrecContentSource.cs              |   3 +-
 src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs     |   3 +-
 src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs    |   3 +-
 .../ByTask/Tasks/AddFacetedDocTask.cs              |   3 +-
 .../ByTask/Tasks/AnalyzerFactoryTask.cs            |   6 +-
 .../ByTask/Tasks/NewAnalyzerTask.cs                |   3 +-
 .../ByTask/Tasks/TaskSequence.cs                   |  10 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs |   2 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs    |  13 +--
 src/Lucene.Net.Benchmark/Quality/QualityStats.cs   |   4 +-
 .../Quality/Trec/Trec1MQReader.cs                  |   3 +-
 src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs |   2 +-
 .../Quality/Trec/TrecTopicsReader.cs               |   3 +-
 .../Quality/Utils/DocNameExtractor.cs              |   3 +-
 .../Support/Sax/Helpers/NamespaceSupport.cs        |   3 +-
 src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs |   4 +-
 .../BlockTerms/BlockTermsWriter.cs                 |   3 +-
 .../BlockTerms/FixedGapTermsIndexWriter.cs         |   4 +-
 .../BlockTerms/VariableGapTermsIndexWriter.cs      |   3 +-
 .../Bloom/BloomFilteringPostingsFormat.cs          |   2 +-
 src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs  |   5 +-
 src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs     |   5 +-
 .../Pulsing/PulsingPostingsWriter.cs               |   6 +-
 src/Lucene.Net.Facet/DrillDownQuery.cs             |   4 +-
 src/Lucene.Net.Facet/FacetsCollector.cs            |   3 +-
 src/Lucene.Net.Facet/FacetsConfig.cs               |   8 +-
 src/Lucene.Net.Facet/MultiFacets.cs                |   3 +-
 .../RandomSamplingFacetsCollector.cs               |   4 +-
 src/Lucene.Net.Facet/Range/LongRangeCounter.cs     |   7 +-
 src/Lucene.Net.Facet/Range/RangeFacetCounts.cs     |   3 +-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs     |   3 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs    |   3 +-
 .../Taxonomy/WriterCache/CharBlockArray.cs         |   5 +-
 .../AbstractFirstPassGroupingCollector.cs          |   2 +-
 .../AbstractGroupFacetCollector.cs                 |   4 +-
 .../Function/FunctionDistinctValuesCollector.cs    |   2 +-
 src/Lucene.Net.Grouping/GroupingSearch.cs          |   5 +-
 src/Lucene.Net.Grouping/SearchGroup.cs             |   4 +-
 .../Term/TermAllGroupHeadsCollector.cs             |   6 +-
 .../Term/TermAllGroupsCollector.cs                 |   3 +-
 .../Term/TermDistinctValuesCollector.cs            |   5 +-
 .../Term/TermGroupFacetCollector.cs                |   6 +-
 .../Highlight/Highlighter.cs                       |   7 +-
 .../Highlight/TokenSources.cs                      |   5 +-
 .../Highlight/TokenStreamFromTermPositionVector.cs |   3 +-
 .../Highlight/WeightedSpanTerm.cs                  |   3 +-
 .../Highlight/WeightedSpanTermExtractor.cs         |  10 +-
 .../PostingsHighlight/MultiTermHighlighting.cs     |   3 +-
 .../PostingsHighlight/PostingsHighlighter.cs       |   2 +-
 .../VectorHighlight/BaseFragListBuilder.cs         |   4 +-
 .../VectorHighlight/BaseFragmentsBuilder.cs        |  12 +--
 .../VectorHighlight/FieldFragList.cs               |   3 +-
 .../VectorHighlight/FieldPhraseList.cs             |  21 ++--
 .../VectorHighlight/FieldTermStack.cs              |   4 +-
 .../VectorHighlight/SimpleFieldFragList.cs         |   3 +-
 .../VectorHighlight/SingleFragListBuilder.cs       |   3 +-
 .../VectorHighlight/WeightedFieldFragList.cs       |   4 +-
 .../Support/ToChildBlockJoinQuery.cs               |   3 +-
 .../Support/ToParentBlockJoinQuery.cs              |   3 +-
 src/Lucene.Net.Join/ToChildBlockJoinQuery.cs       |   4 +-
 src/Lucene.Net.Join/ToParentBlockJoinQuery.cs      |   3 +-
 src/Lucene.Net.Misc/Document/LazyDocument.cs       |   2 +-
 src/Lucene.Net.Misc/Index/IndexSplitter.cs         |   5 +-
 .../Index/MultiPassIndexSplitter.cs                |   4 +-
 .../Index/Sorter/SortingMergePolicy.cs             |   3 +-
 src/Lucene.Net.Queries/CommonTermsQuery.cs         |   2 +-
 src/Lucene.Net.Queries/CustomScoreQuery.cs         |   3 +-
 src/Lucene.Net.Queries/Function/BoostedQuery.cs    |   3 +-
 src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs         |   5 +-
 src/Lucene.Net.Queries/TermsFilter.cs              |   3 +-
 .../Classic/MultiFieldQueryParser.cs               |  15 +--
 src/Lucene.Net.QueryParser/Classic/QueryParser.cs  |   5 +-
 .../ComplexPhrase/ComplexPhraseQueryParser.cs      |  15 +--
 .../Flexible/Core/Nodes/GroupQueryNode.cs          |   3 +-
 .../Flexible/Core/Nodes/ModifierQueryNode.cs       |   3 +-
 .../Flexible/Core/Nodes/PathQueryNode.cs           |   5 +-
 .../Core/Processors/QueryNodeProcessorImpl.cs      |   5 +-
 .../Flexible/Core/Util/QueryNodeOperation.cs       |   4 +-
 .../BooleanModifiersQueryNodeProcessor.cs          |   3 +-
 .../Builders/MultiPhraseQueryNodeBuilder.cs        |   8 +-
 .../Standard/Nodes/AbstractRangeQueryNode.cs       |   3 +-
 .../Standard/Parser/StandardSyntaxParser.cs        |  15 +--
 .../Processors/AnalyzerQueryNodeProcessor.cs       |   7 +-
 .../BooleanQuery2ModifierNodeProcessor.cs          |   3 +-
 .../Standard/Processors/GroupQueryNodeProcessor.cs |   7 +-
 .../Processors/MultiFieldQueryNodeProcessor.cs     |   3 +-
 .../RemoveEmptyNonLeafQueryNodeProcessor.cs        |   3 +-
 .../Surround/Parser/QueryParser.cs                 |  17 ++--
 .../Surround/Query/ComposedQuery.cs                |   2 +-
 .../Surround/Query/FieldsQuery.cs                  |   5 +-
 .../Surround/Query/SimpleTermRewriteQuery.cs       |   3 +-
 .../Surround/Query/SpanNearClauseFactory.cs        |   2 +-
 .../Xml/Builders/SpanNearBuilder.cs                |   3 +-
 .../Xml/Builders/SpanOrBuilder.cs                  |   4 +-
 .../Xml/Builders/SpanOrTermsBuilder.cs             |   5 +-
 .../Xml/Builders/TermsFilterBuilder.cs             |   4 +-
 src/Lucene.Net.Replicator/IndexRevision.cs         |   6 +-
 src/Lucene.Net.Replicator/ReplicationClient.cs     |   4 +-
 src/Lucene.Net.Replicator/SessionToken.cs          |   5 +-
 .../Queries/FuzzyLikeThisQuery.cs                  |  10 +-
 src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs         |   6 +-
 .../Prefix/Tree/GeohashPrefixTree.cs               |   3 +-
 .../Prefix/Tree/QuadPrefixTree.cs                  |   7 +-
 .../Prefix/Tree/SpatialPrefixTree.cs               |   8 +-
 src/Lucene.Net.Spatial/Query/SpatialOperation.cs   |   3 +-
 src/Lucene.Net.Spatial/Util/ShapeFieldCache.cs     |   3 +-
 src/Lucene.Net.Suggest/Spell/SpellChecker.cs       |   3 +-
 .../Suggest/Analyzing/AnalyzingInfixSuggester.cs   |   2 +-
 .../Suggest/Analyzing/AnalyzingSuggester.cs        |   4 +-
 .../Suggest/Analyzing/BlendedInfixSuggester.cs     |   2 +-
 .../Suggest/Analyzing/FSTUtil.cs                   |   6 +-
 .../Suggest/Analyzing/FreeTextSuggester.cs         |   2 +-
 .../Suggest/BufferedInputIterator.cs               |   3 +-
 .../Suggest/Fst/FSTCompletion.cs                   |   2 +-
 .../Suggest/Fst/FSTCompletionLookup.cs             |   3 +-
 .../Suggest/Fst/WFSTCompletionLookup.cs            |   5 +-
 .../Suggest/Jaspell/JaspellLookup.cs               |   3 +-
 .../Suggest/Jaspell/JaspellTernarySearchTrie.cs    |   7 +-
 .../Suggest/Tst/TSTAutocomplete.cs                 |   3 +-
 src/Lucene.Net.Suggest/Suggest/Tst/TSTLookup.cs    |   7 +-
 .../Analysis/BaseTokenStreamTestCase.cs            |  13 +--
 .../Analysis/LookaheadTokenFilter.cs               |   5 +-
 .../Codecs/MockRandom/MockRandomPostingsFormat.cs  |   3 +-
 .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs        |   2 +-
 .../BaseCompressingDocValuesFormatTestCase.cs      |   3 +-
 .../Index/BaseDocValuesFormatTestCase.cs           |  12 +--
 .../Index/BasePostingsFormatTestCase.cs            |   8 +-
 .../Index/BaseStoredFieldsFormatTestCase.cs        |   4 +-
 .../Index/BaseTermVectorsFormatTestCase.cs         |   2 +-
 .../Index/ThreadedIndexingAndSearchingTestCase.cs  |   9 +-
 .../Search/AssertingScorer.cs                      |   5 +-
 .../Search/RandomSimilarityProvider.cs             |   5 +-
 .../Search/SearchEquivalenceTestBase.cs            |   2 +-
 .../Store/BaseDirectoryTestCase.cs                 |   4 +-
 .../Store/MockDirectoryWrapper.cs                  |   8 +-
 .../JavaCompatibility/SystemTypesHelpers.cs        |   3 +-
 .../Support/SynchronizedList.cs                    |   5 +-
 .../Util/Automaton/AutomatonTestUtil.cs            |   6 +-
 src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs |   2 +-
 .../Util/LuceneTestCase.cs                         |   6 +-
 .../Util/RunListenerPrintReproduceInfo.cs          |   4 +-
 .../Util/TestRuleMarkFailure.cs                    |   4 +-
 .../Analysis/CharFilters/TestMappingCharFilter.cs  |   6 +-
 .../Analysis/Core/TestRandomChains.cs              |  14 +--
 .../Analysis/Core/TestStopFilter.cs                |  10 +-
 .../Analysis/Core/TestUAX29URLEmailTokenizer.cs    |   9 +-
 .../Miscellaneous/TestASCIIFoldingFilter.cs        |   5 +-
 .../Miscellaneous/TestCapitalizationFilter.cs      |   5 +-
 .../Miscellaneous/TestStemmerOverrideFilter.cs     |   5 +-
 .../Analysis/Pattern/TestPatternTokenizer.cs       |   5 +-
 .../Analysis/Synonym/TestMultiWordSynonyms.cs      |   5 +-
 .../Analysis/Synonym/TestSlowSynonymFilter.cs      |   5 +-
 .../Analysis/Synonym/TestSynonymMap.cs             |  11 +-
 .../Analysis/Synonym/TestSynonymMapFilter.cs       |   4 +-
 .../Analysis/Th/TestThaiAnalyzer.cs                |   9 +-
 .../Analysis/Util/TestCharArraySet.cs              |  72 ++++++-------
 .../Analysis/Util/TestElision.cs                   |   5 +-
 .../Tools/TestBuildDictionary.cs                   |   4 +-
 .../Language/Bm/PhoneticEngineTest.cs              |   4 +-
 .../AssertingSubDocsAtOnceCollector.cs             |   3 +-
 src/Lucene.Net.Tests.Facet/FacetTestCase.cs        |   7 +-
 .../SortedSet/TestSortedSetDocValuesFacets.cs      |   7 +-
 .../Taxonomy/TestSearcherTaxonomyManager.cs        |   2 +-
 .../Taxonomy/TestTaxonomyCombined.cs               |   5 +-
 .../Taxonomy/TestTaxonomyFacetCounts.cs            |   6 +-
 .../Taxonomy/TestTaxonomyFacetCounts2.cs           |   2 +-
 .../Taxonomy/TestTaxonomyFacetSumValueSource.cs    |   7 +-
 src/Lucene.Net.Tests.Facet/TestDrillSideways.cs    |   4 +-
 .../AllGroupHeadsCollectorTest.cs                  |  12 +--
 .../DistinctValuesCollectorTest.cs                 |  46 ++++-----
 .../GroupFacetCollectorTest.cs                     |   6 +-
 .../GroupingSearchTest.cs                          |   5 +-
 src/Lucene.Net.Tests.Grouping/TestGrouping.cs      |  44 ++++----
 .../Highlight/HighlighterTest.cs                   |  12 +--
 .../VectorHighlight/AbstractTestCase.cs            |   7 +-
 .../VectorHighlight/FieldPhraseListTest.cs         |   3 +-
 .../VectorHighlight/FieldQueryTest.cs              |   6 +-
 .../VectorHighlight/SimpleFragmentsBuilderTest.cs  |   6 +-
 src/Lucene.Net.Tests.Join/TestBlockJoin.cs         |  31 +++---
 src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs  |   3 +-
 .../TestBlockJoinValidation.cs                     |   7 +-
 src/Lucene.Net.Tests.Join/TestJoinUtil.cs          |  12 +--
 .../Index/Sorter/IndexSortingTest.cs               |   3 +-
 .../Index/Sorter/SorterTestBase.cs                 |   3 +-
 .../Index/Sorter/TestBlockJoinSorter.cs            |   5 +-
 .../Index/Sorter/TestEarlyTermination.cs           |   4 +-
 .../Index/Sorter/TestSortingMergePolicy.cs         |   4 +-
 .../CommonTermsQueryTest.cs                        |   8 +-
 src/Lucene.Net.Tests.Queries/TermFilterTest.cs     |   2 +-
 src/Lucene.Net.Tests.Queries/TermsFilterTest.cs    |  10 +-
 .../DistanceStrategyTest.cs                        |   3 +-
 src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs    |   2 +-
 .../Prefix/SpatialOpRecursivePrefixTreeTest.cs     |   8 +-
 .../QueryEqualsHashCodeTest.cs                     |   3 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs    |   8 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestData.cs    |   3 +-
 src/Lucene.Net.Tests.Spatial/SpatialTestQuery.cs   |   5 +-
 src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs   |  14 +--
 src/Lucene.Net.Tests.Spatial/TestTestFramework.cs  |   4 +-
 .../Spell/TestWordBreakSpellChecker.cs             |   5 +-
 .../Analyzing/AnalyzingInfixSuggesterTest.cs       |  10 +-
 .../Suggest/Analyzing/AnalyzingSuggesterTest.cs    |  10 +-
 .../Suggest/Analyzing/FuzzySuggesterTest.cs        |  18 ++--
 .../Suggest/Analyzing/TestFreeTextSuggester.cs     |   6 +-
 .../Suggest/DocumentDictionaryTest.cs              |  24 ++---
 .../Suggest/DocumentValueSourceDictionaryTest.cs   |   2 +-
 .../Suggest/FileDictionaryTest.cs                  |  48 ++++-----
 .../Suggest/Fst/FSTCompletionTest.cs               |   5 +-
 .../Suggest/Fst/WFSTCompletionTest.cs              |   2 +-
 .../Suggest/LookupBenchmarkTest.cs                 |   4 +-
 .../Analysis/TrivialLookaheadFilter.cs             | 112 +++++++++++----------
 .../Analysis/TestGraphTokenizers.cs                |   5 +-
 .../Analysis/TrivialLookaheadFilter.cs             |   5 +-
 .../Codecs/Lucene3x/TestSurrogates.cs              |   2 +-
 .../Codecs/Lucene3x/TestTermInfosReaderIndex.cs    |   5 +-
 .../Codecs/Lucene40/TestLucene40PostingsReader.cs  |   3 +-
 .../Codecs/Lucene41/TestBlockPostingsFormat3.cs    |   2 +-
 src/Lucene.Net.Tests/Index/Test2BTerms.cs          |   4 +-
 src/Lucene.Net.Tests/Index/TestAddIndexes.cs       |   5 +-
 .../Index/TestBackwardsCompatibility.cs            |   7 +-
 .../Index/TestBackwardsCompatibility3x.cs          |   5 +-
 src/Lucene.Net.Tests/Index/TestBagOfPositions.cs   |   3 +-
 src/Lucene.Net.Tests/Index/TestBagOfPostings.cs    |   2 +-
 src/Lucene.Net.Tests/Index/TestCheckIndex.cs       |   5 +-
 src/Lucene.Net.Tests/Index/TestDoc.cs              |   4 +-
 src/Lucene.Net.Tests/Index/TestDocTermOrds.cs      |   4 +-
 .../Index/TestDocValuesWithThreads.cs              |  10 +-
 src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs |   6 +-
 .../Index/TestFlushByRamOrCountsPolicy.cs          |  11 +-
 src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs |   3 +-
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs      |  10 +-
 .../Index/TestIndexWriterDelete.cs                 |   3 +-
 .../Index/TestIndexWriterExceptions.cs             |   8 +-
 .../Index/TestIndexWriterMerging.cs                |   7 +-
 .../Index/TestIndexWriterOnJRECrash.cs             |   2 +-
 .../Index/TestIndexWriterReader.cs                 |   3 +-
 src/Lucene.Net.Tests/Index/TestIntBlockPool.cs     |   3 +-
 src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs |   7 +-
 src/Lucene.Net.Tests/Index/TestMultiDocValues.cs   |   5 +-
 src/Lucene.Net.Tests/Index/TestMultiFields.cs      |   8 +-
 src/Lucene.Net.Tests/Index/TestPayloads.cs         |   3 +-
 .../Index/TestPerSegmentDeletes.cs                 |   7 +-
 src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs  |   5 +-
 src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs |   4 +-
 src/Lucene.Net.Tests/Index/TestSegmentMerger.cs    |   6 +-
 .../Index/TestSnapshotDeletionPolicy.cs            |   3 +-
 src/Lucene.Net.Tests/Index/TestStressAdvance.cs    |   6 +-
 src/Lucene.Net.Tests/Index/TestStressIndexing2.cs  |   5 +-
 src/Lucene.Net.Tests/Index/TestStressNRT.cs        |   3 +-
 src/Lucene.Net.Tests/Index/TestTermsEnum.cs        |   6 +-
 src/Lucene.Net.Tests/Index/TestTermsEnum2.cs       |   4 +-
 src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs  |   2 +-
 .../Search/Similarities/TestSimilarity2.cs         |   5 +-
 .../Search/Similarities/TestSimilarityBase.cs      |   5 +-
 src/Lucene.Net.Tests/Search/Spans/TestBasics.cs    |  11 +-
 src/Lucene.Net.Tests/Search/TestBooleanQuery.cs    |   5 +-
 src/Lucene.Net.Tests/Search/TestBooleanScorer.cs   |   5 +-
 .../Search/TestControlledRealTimeReopenThread.cs   |   3 +-
 src/Lucene.Net.Tests/Search/TestDocIdSet.cs        |   5 +-
 .../Search/TestDocTermOrdsRangeFilter.cs           |   3 +-
 .../Search/TestDocTermOrdsRewriteMethod.cs         |   3 +-
 .../Search/TestFieldCacheTermsFilter.cs            |  10 +-
 src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs      |   5 +-
 src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs |   3 +-
 src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs |   4 +-
 src/Lucene.Net.Tests/Search/TestPhraseQuery.cs     |   5 +-
 src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs   |   4 +-
 .../Search/TestSameScoresWithThreads.cs            |   3 +-
 src/Lucene.Net.Tests/Search/TestSearchAfter.cs     |   5 +-
 src/Lucene.Net.Tests/Search/TestSearcherManager.cs |   3 +-
 src/Lucene.Net.Tests/Search/TestShardSearching.cs  |   8 +-
 src/Lucene.Net.Tests/Search/TestSort.cs            |  13 +--
 src/Lucene.Net.Tests/Search/TestSortRandom.cs      |   2 +-
 src/Lucene.Net.Tests/Search/TestTermScorer.cs      |   5 +-
 src/Lucene.Net.Tests/Search/TestTopDocsMerge.cs    |   5 +-
 .../Store/TestBufferedIndexInput.cs                |   3 +-
 src/Lucene.Net.Tests/Store/TestDirectory.cs        |   3 +-
 .../Store/TestNRTCachingDirectory.cs               |   3 +-
 src/Lucene.Net.Tests/TestSearch.cs                 |   5 +-
 .../Util/Automaton/TestBasicOperations.cs          |   3 +-
 .../Util/Automaton/TestCompiledAutomaton.cs        |   4 +-
 .../Util/Automaton/TestDeterminizeLexicon.cs       |   7 +-
 .../Util/Automaton/TestLevenshteinAutomata.cs      |  11 +-
 src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs          |  32 +++---
 src/Lucene.Net.Tests/Util/Packed/TestPackedInts.cs |   4 +-
 src/Lucene.Net.Tests/Util/TestByteBlockPool.cs     |   3 +-
 src/Lucene.Net.Tests/Util/TestBytesRefArray.cs     |   9 +-
 src/Lucene.Net.Tests/Util/TestCollectionUtil.cs    |   8 +-
 src/Lucene.Net.Tests/Util/TestMergedIterator.cs    |  19 ++--
 src/Lucene.Net.Tests/Util/TestOfflineSorter.cs     |   3 +-
 .../Util/TestRecyclingByteBlockAllocator.cs        |   4 +-
 .../Util/TestRecyclingIntBlockAllocator.cs         |   4 +-
 src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs      |   9 +-
 src/Lucene.Net/Analysis/CachingTokenFilter.cs      |   5 +-
 src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs      |   6 +-
 .../Codecs/Lucene3x/TermInfosReaderIndex.cs        |   3 +-
 src/Lucene.Net/Document/Document.cs                |  17 ++--
 src/Lucene.Net/Index/BufferedUpdates.cs            |   5 +-
 src/Lucene.Net/Index/BufferedUpdatesStream.cs      |   6 +-
 src/Lucene.Net/Index/CheckIndex.cs                 |  13 ++-
 src/Lucene.Net/Index/CoalescedUpdates.cs           |  10 +-
 src/Lucene.Net/Index/CompositeReaderContext.cs     |   7 +-
 src/Lucene.Net/Index/ConcurrentMergeScheduler.cs   |   7 +-
 src/Lucene.Net/Index/DirectoryReader.cs            |   3 +-
 src/Lucene.Net/Index/DocTermOrds.cs                |   4 +-
 .../Index/DocumentsWriterFlushControl.cs           |   2 +-
 src/Lucene.Net/Index/FreqProxTermsWriter.cs        |   5 +-
 src/Lucene.Net/Index/FrozenBufferedUpdates.cs      |   7 +-
 src/Lucene.Net/Index/IndexFileDeleter.cs           |   9 +-
 src/Lucene.Net/Index/IndexWriter.cs                |  14 +--
 src/Lucene.Net/Index/LogMergePolicy.cs             |   5 +-
 src/Lucene.Net/Index/MergePolicy.cs                |   7 +-
 src/Lucene.Net/Index/MultiFields.cs                |  10 +-
 src/Lucene.Net/Index/MultiTerms.cs                 |   8 +-
 .../Index/PersistentSnapshotDeletionPolicy.cs      |   5 +-
 src/Lucene.Net/Index/SegmentInfos.cs               |   2 +-
 src/Lucene.Net/Index/SegmentMerger.cs              |  21 ++--
 src/Lucene.Net/Index/SegmentReader.cs              |   6 +-
 src/Lucene.Net/Index/SnapshotDeletionPolicy.cs     |   5 +-
 src/Lucene.Net/Index/StandardDirectoryReader.cs    |   3 +-
 src/Lucene.Net/Index/TieredMergePolicy.cs          |   6 +-
 src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs    |   5 +-
 src/Lucene.Net/Search/BooleanQuery.cs              |  20 ++--
 src/Lucene.Net/Search/BooleanScorer2.cs            |   5 +-
 src/Lucene.Net/Search/CachingCollector.cs          |  11 +-
 src/Lucene.Net/Search/CachingWrapperFilter.cs      |   7 +-
 src/Lucene.Net/Search/ConjunctionScorer.cs         |   5 +-
 src/Lucene.Net/Search/DisjunctionMaxQuery.cs       |   4 +-
 src/Lucene.Net/Search/DisjunctionScorer.cs         |   5 +-
 src/Lucene.Net/Search/Explanation.cs               |   8 +-
 src/Lucene.Net/Search/FieldCacheImpl.cs            |   3 +-
 src/Lucene.Net/Search/MinShouldMatchSumScorer.cs   |   3 +-
 src/Lucene.Net/Search/MultiPhraseQuery.cs          |   2 +-
 src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs  |   8 +-
 src/Lucene.Net/Search/ReqOptSumScorer.cs           |   5 +-
 src/Lucene.Net/Search/SearcherLifetimeManager.cs   |   3 +-
 src/Lucene.Net/Search/SloppyPhraseScorer.cs        |  12 +--
 src/Lucene.Net/Search/Spans/NearSpansOrdered.cs    |   8 +-
 src/Lucene.Net/Search/Spans/NearSpansUnordered.cs  |   6 +-
 src/Lucene.Net/Search/Spans/SpanNearQuery.cs       |   2 +-
 src/Lucene.Net/Search/Spans/SpanNotQuery.cs        |   5 +-
 src/Lucene.Net/Search/Spans/SpanOrQuery.cs         |   4 +-
 .../Search/Spans/SpanPositionCheckQuery.cs         |   5 +-
 src/Lucene.Net/Search/WildcardQuery.cs             |   3 +-
 src/Lucene.Net/Store/FileSwitchDirectory.cs        |   4 +-
 src/Lucene.Net/Store/RAMFile.cs                    |   5 +-
 src/Lucene.Net/Util/Automaton/BasicAutomata.cs     |   7 +-
 src/Lucene.Net/Util/Automaton/BasicOperations.cs   |   8 +-
 src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs |   3 +-
 .../Util/Automaton/MinimizationOperations.cs       |  16 +--
 src/Lucene.Net/Util/Automaton/RegExp.cs            |   4 +-
 src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs       |   5 +-
 src/Lucene.Net/Util/CloseableThreadLocal.cs        |   3 +-
 src/Lucene.Net/Util/FieldCacheSanityChecker.cs     |  10 +-
 src/Lucene.Net/Util/OfflineSorter.cs               |   3 +-
 src/Lucene.Net/Util/PagedBytes.cs                  |   5 +-
 src/Lucene.Net/Util/QueryBuilder.cs                |   3 +-
 src/Lucene.Net/Util/RamUsageEstimator.cs           |   2 +-
 412 files changed, 1400 insertions(+), 1179 deletions(-)

diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemmer.cs
index 17481ec..e36773f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemmer.cs
@@ -1,6 +1,7 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ar
 {
@@ -52,7 +53,7 @@ namespace Lucene.Net.Analysis.Ar
 
         private static IList<char[]> InitializePrefix()
         {
-            return new List<char[]>(){ ("" + ALEF + LAM).ToCharArray(),
+            return new JCG.List<char[]>(){ ("" + ALEF + LAM).ToCharArray(),
             ("" + WAW + ALEF + LAM).ToCharArray(),
             ("" + BEH + ALEF + LAM).ToCharArray(),
             ("" + KAF + ALEF + LAM).ToCharArray(),
@@ -62,7 +63,7 @@ namespace Lucene.Net.Analysis.Ar
         }
         private static IList<char[]> InitializeSuffix()
         {
-            return new List<char[]>(){ ("" + HEH + ALEF).ToCharArray(),
+            return new JCG.List<char[]>(){ ("" + HEH + ALEF).ToCharArray(),
             ("" + ALEF + NOON).ToCharArray(),
             ("" + ALEF + TEH).ToCharArray(),
             ("" + WAW + NOON).ToCharArray(),
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
index b6eb98d..f6e0f3a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text.RegularExpressions;
 using Integer = J2N.Numerics.Int32;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.CharFilters
 {
@@ -63,12 +64,12 @@ namespace Lucene.Net.Analysis.CharFilters
                 IList<string> wlist;
                 if (File.Exists(mapping))
                 {
-                    wlist = new List<string>(GetLines(loader, mapping));
+                    wlist = new JCG.List<string>(GetLines(loader, mapping));
                 }
                 else
                 {
                     var files = SplitFileNames(mapping);
-                    wlist = new List<string>();
+                    wlist = new JCG.List<string>();
                     foreach (string file in files)
                     {
                         var lines = GetLines(loader, file.Trim());
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs
index bc6112f..513ecb7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs
@@ -6,6 +6,7 @@ using System.IO;
 using System.Linq;
 using System.Text;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Compound.Hyphenation
 {
@@ -289,7 +290,7 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
 
         protected virtual IList<object> NormalizeException<T1>(IList<T1> ex)
         {
-            List<object> res = new List<object>();
+            IList<object> res = new JCG.List<object>();
             for (int i = 0; i < ex.Count; i++)
             {
                 object item = ex[i];
@@ -424,7 +425,7 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
             else if (local.Equals("exceptions", StringComparison.Ordinal))
             {
                 currElement = ELEM_EXCEPTIONS;
-                exception = new List<object>();
+                exception = new JCG.List<object>();
             }
             else if (local.Equals("hyphen", StringComparison.Ordinal))
             {
@@ -461,7 +462,7 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
                     case ELEM_EXCEPTIONS:
                         exception.Add(word);
                         exception = NormalizeException(exception);
-                        consumer.AddException(GetExceptionWord(exception), new List<object>(exception));
+                        consumer.AddException(GetExceptionWord(exception), new JCG.List<object>(exception));
                         break;
                     case ELEM_PATTERNS:
                         consumer.AddPattern(GetPattern(word), GetInterletterValues(word));
@@ -514,7 +515,7 @@ namespace Lucene.Net.Analysis.Compound.Hyphenation
                     case ELEM_EXCEPTIONS:
                         exception.Add(word);
                         exception = NormalizeException(exception);
-                        consumer.AddException(GetExceptionWord(exception), new List<object>(exception));
+                        consumer.AddException(GetExceptionWord(exception), new JCG.List<object>(exception));
                         exception.Clear();
                         break;
                     case ELEM_PATTERNS:
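In the two PatternParser hunks above, the exception list handed to consumer.AddException is wrapped in new JCG.List<object>(exception), so the exception.Clear() that follows in the second hunk does not disturb what the consumer stored. A minimal standalone sketch of that copy-then-clear pattern, not part of the patch, with invented names and assuming only that the J2N package is referenced:

    using System;
    using JCG = J2N.Collections.Generic;

    public static class CopyThenClearSketch
    {
        public static void Main()
        {
            var working = new JCG.List<object> { "f", "o", "o" };

            // Hand the consumer a shallow copy, as the parser does with AddException.
            var stored = new JCG.List<object>(working);

            // Clearing the working buffer afterwards leaves the stored copy intact.
            working.Clear();

            Console.WriteLine(working.Count); // expected: 0
            Console.WriteLine(stored.Count);  // expected: 3
        }
    }
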
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
index 7fc13e7..dec0a51 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
@@ -76,7 +76,7 @@ namespace Lucene.Net.Analysis.Hunspell
 
         // all condition checks used by prefixes and suffixes. these are typically re-used across
         // many affix stripping rules. so these are deduplicated, to save RAM.
-        internal List<CharacterRunAutomaton> patterns = new List<CharacterRunAutomaton>();
+        internal IList<CharacterRunAutomaton> patterns = new JCG.List<CharacterRunAutomaton>();
 
         // the entries in the .dic file, mapping to their set of flags.
         // the fst output is the ordinal list for flagLookup
@@ -153,7 +153,7 @@ namespace Lucene.Net.Analysis.Hunspell
         /// <exception cref="IOException"> Can be thrown while reading from the <see cref="Stream"/>s </exception>
         /// <exception cref="Exception"> Can be thrown if the content of the files does not meet expected formats </exception>
         public Dictionary(Stream affix, Stream dictionary) 
-            : this(affix, new List<Stream>() { dictionary }, false)
+            : this(affix, new JCG.List<Stream>() { dictionary }, false)
         {
         }
 
@@ -652,7 +652,7 @@ namespace Lucene.Net.Analysis.Hunspell
 
                 if (!affixes.TryGetValue(affixArg, out IList<int> list) || list == null)
                 {
-                    affixes[affixArg] = list = new List<int>();
+                    affixes[affixArg] = list = new JCG.List<int>();
                 }
 
                 list.Add(currentAffix);
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilter.cs
index d6c15e1..318edab 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilter.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Hunspell
 {
@@ -46,7 +47,7 @@ namespace Lucene.Net.Analysis.Hunspell
         private readonly IKeywordAttribute keywordAtt;
         private readonly Stemmer stemmer;
 
-        private List<CharsRef> buffer;
+        private JCG.List<CharsRef> buffer;
         private State savedState;
 
         private readonly bool dedup;
@@ -109,7 +110,7 @@ namespace Lucene.Net.Analysis.Hunspell
                 return true;
             }
 
-            buffer = new List<CharsRef>(dedup ? stemmer.UniqueStems(termAtt.Buffer, termAtt.Length) : stemmer.Stem(termAtt.Buffer, termAtt.Length));
+            buffer = new JCG.List<CharsRef>(dedup ? stemmer.UniqueStems(termAtt.Buffer, termAtt.Length) : stemmer.Stem(termAtt.Buffer, termAtt.Length));
 
             if (buffer.Count == 0) // we do not know this word, return it unchanged
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
index c2d6493..e15f57f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Hunspell
 {
@@ -81,11 +82,11 @@ namespace Lucene.Net.Analysis.Hunspell
             string[] dicts = dictionaryFiles.Split(',').TrimEnd();
 
             Stream affix = null;
-            IList<Stream> dictionaries = new List<Stream>();
+            IList<Stream> dictionaries = new JCG.List<Stream>();
 
             try
             {
-                dictionaries = new List<Stream>();
+                dictionaries = new JCG.List<Stream>();
                 foreach (string file in dicts)
                 {
                     dictionaries.Add(loader.OpenResource(file));
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs
index ecf3857..b687b03 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs
@@ -8,8 +8,8 @@ using Lucene.Net.Util.Automaton;
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
-using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Hunspell
 {
@@ -190,7 +190,7 @@ namespace Lucene.Net.Analysis.Hunspell
 
         private IList<CharsRef> DoStem(char[] word, int length, bool caseVariant)
         {
-            List<CharsRef> stems = new List<CharsRef>();
+            JCG.List<CharsRef> stems = new JCG.List<CharsRef>();
             Int32sRef forms = dictionary.LookupWord(word, 0, length);
             if (forms != null)
             {
@@ -250,7 +250,7 @@ namespace Lucene.Net.Analysis.Hunspell
 #pragma warning disable 612, 618
                 LuceneVersion.LUCENE_CURRENT, 8, dictionary.ignoreCase);
 #pragma warning restore 612, 618
-            IList<CharsRef> deduped = new List<CharsRef>();
+            IList<CharsRef> deduped = new JCG.List<CharsRef>();
             foreach (CharsRef s in stems)
             {
                 if (!terms.Contains(s))
@@ -348,7 +348,7 @@ namespace Lucene.Net.Analysis.Hunspell
         private IList<CharsRef> Stem(char[] word, int length, int previous, int prevFlag, int prefixFlag, int recursionDepth, bool doPrefix, bool doSuffix, bool previousWasPrefix, bool circumfix, bool caseVariant)
         {
             // TODO: allow this stuff to be reused by tokenfilter
-            List<CharsRef> stems = new List<CharsRef>();
+            JCG.List<CharsRef> stems = new JCG.List<CharsRef>();
 
             if (doPrefix && dictionary.prefixes != null)
             {
@@ -619,7 +619,7 @@ namespace Lucene.Net.Analysis.Hunspell
             condition = condition.TripleShift(1);
             char append = (char)(affixReader.ReadInt16() & 0xffff);
 
-            List<CharsRef> stems = new List<CharsRef>();
+            JCG.List<CharsRef> stems = new JCG.List<CharsRef>();
 
             Int32sRef forms = dictionary.LookupWord(strippedWord, 0, length);
             if (forms != null)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs
index be98da5..91e592c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs
@@ -1,9 +1,10 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Support;
 using Lucene.Net.Analysis.Util;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
@@ -94,7 +95,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             k = GetSet(args, OK_PREFIX);
             if (k != null)
             {
-                okPrefix = new List<char[]>();
+                okPrefix = new JCG.List<char[]>();
                 foreach (string item in k)
                 {
                     okPrefix.Add(item.ToCharArray());
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs
index d2111b9..61db8a3 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilter.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
@@ -6,6 +6,7 @@ using Lucene.Net.Util.Fst;
 using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
@@ -158,7 +159,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             private readonly BytesRefHash hash = new BytesRefHash();
             private readonly BytesRef spare = new BytesRef();
-            private readonly List<string> outputValues = new List<string>();
+            private readonly IList<string> outputValues = new JCG.List<string>();
             private readonly bool ignoreCase;
             private readonly CharsRef charsSpare = new CharsRef();
 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
index 8ce8c14..6e5a4ae 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs
@@ -114,7 +114,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             if (types != null)
             {
                 IList<string> files = SplitFileNames(types);
-                IList<string> wlist = new List<string>();
+                IList<string> wlist = new JCG.List<string>();
                 foreach (string file in files)
                 {
                     IList<string> lines = GetLines(loader, file.Trim());
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Path/ReversePathHierarchyTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Path/ReversePathHierarchyTokenizer.cs
index 7f32944..b77ed31 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Path/ReversePathHierarchyTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Path/ReversePathHierarchyTokenizer.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Path
 {
@@ -112,7 +113,7 @@ namespace Lucene.Net.Analysis.Path
             this.skip = skip;
             resultToken = new StringBuilder(bufferSize);
             resultTokenBuffer = new char[bufferSize];
-            delimiterPositions = new List<int>(bufferSize / 10);
+            delimiterPositions = new JCG.List<int>(bufferSize / 10);
         }
 
         private const int DEFAULT_BUFFER_SIZE = 1024;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
index 72d12d9..89d3553 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
@@ -10,6 +10,7 @@ using System.Globalization;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Pt
 {
@@ -324,7 +325,7 @@ namespace Lucene.Net.Analysis.Pt
 
         private static Rule[] ParseRules(TextReader r, int type)
         {
-            IList<Rule> rules = new List<Rule>();
+            IList<Rule> rules = new JCG.List<Rule>();
             string line;
             while ((line = ReadLine(r)) != null)
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs
index 4e7319c..6062c06 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Query/QueryAutoStopWordAnalyzer.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Analysis.Core;
 using Lucene.Net.Analysis.Util;
@@ -180,7 +180,7 @@ namespace Lucene.Net.Analysis.Query
         /// <returns> the stop words (as terms) </returns>
         public Term[] GetStopWords()
         {
-            IList<Term> allStopWords = new List<Term>();
+            IList<Term> allStopWords = new JCG.List<Term>();
             foreach (string fieldName in stopWordsPerField.Keys)
             {
                 ISet<string> stopWords = stopWordsPerField[fieldName];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
index 678df88..b89ee2d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
@@ -1,7 +1,8 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Sinks
 {
@@ -200,7 +201,7 @@ namespace Lucene.Net.Analysis.Sinks
         /// </summary>
         public sealed class SinkTokenStream : TokenStream
         {
-            private readonly IList<AttributeSource.State> cachedStates = new List<AttributeSource.State>();
+            private readonly IList<AttributeSource.State> cachedStates = new JCG.List<AttributeSource.State>();
             private AttributeSource.State finalState;
             private IEnumerator<AttributeSource.State> it = null;
             private readonly SinkFilter filter; // LUCENENET: marked readonly
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
index 9aa8336..d62a9fc 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
@@ -7,6 +7,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -58,7 +59,7 @@ namespace Lucene.Net.Analysis.Synonym
                 AssureMatchVersion();
                 tokArgs["luceneMatchVersion"] = LuceneMatchVersion.ToString();
 
-                var keys = new List<string>(args.Keys);
+                var keys = new JCG.List<string>(args.Keys);
                 foreach (string key in keys)
                 {
                     tokArgs[Holder.TOKENIZER_FACTORY_REPLACEMENT_PATTERN.Replace(key, "")] = args[key];
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilter.cs
index c6610bf..13dd3d0 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilter.cs
@@ -1,8 +1,9 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -120,7 +121,7 @@ namespace Lucene.Net.Analysis.Synonym
                 }
 
                 // reuse, or create new one each time?
-                List<AttributeSource> generated = new List<AttributeSource>(result.Synonyms.Length + matched.Count + 1);
+                IList<AttributeSource> generated = new JCG.List<AttributeSource>(result.Synonyms.Length + matched.Count + 1);
 
                 //
                 // there was a match... let's generate the new tokens, merging
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs
index 9954d3b..3cce61d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -66,7 +67,7 @@ namespace Lucene.Net.Analysis.Synonym
                 AssureMatchVersion();
                 tokArgs["luceneMatchVersion"] = LuceneMatchVersion.ToString();
 
-                var keys = new List<string>(args.Keys);
+                var keys = new JCG.List<string>(args.Keys);
                 foreach (string key in keys)
                 {
                     tokArgs[Holder.TOKENIZER_FACTORY_REPLACEMENT_PATTERN.Replace(key, "")] = args[key];
@@ -96,15 +97,15 @@ namespace Lucene.Net.Analysis.Synonym
         /// <returns> a list of all rules </returns>
         private IEnumerable<string> LoadRules(string synonyms, IResourceLoader loader)
         {
-            List<string> wlist = null;
+            JCG.List<string> wlist = null;
             if (File.Exists(synonyms))
             {
-                wlist = new List<string>(GetLines(loader, synonyms));
+                wlist = new JCG.List<string>(GetLines(loader, synonyms));
             }
             else
             {
                 IList<string> files = SplitFileNames(synonyms);
-                wlist = new List<string>();
+                wlist = new JCG.List<string>();
                 foreach (string file in files)
                 {
                     IList<string> lines = GetLines(loader, file.Trim());
@@ -151,7 +152,7 @@ namespace Lucene.Net.Analysis.Synonym
                     else
                     {
                         // reduce to first argument
-                        target = new List<IList<string>>(1)
+                        target = new JCG.List<IList<string>>(1)
                         {
                             source[0]
                         };
@@ -175,7 +176,7 @@ namespace Lucene.Net.Analysis.Synonym
         {
             IList<string> strList = SplitSmart(str, separator, false);
             // now split on whitespace to get a list of token strings
-            IList<IList<string>> synList = new List<IList<string>>();
+            IList<IList<string>> synList = new JCG.List<IList<string>>();
             foreach (string toks in strList)
             {
                 IList<string> tokList = tokFactory == null ? SplitWS(toks, true) : SplitByTokenizer(toks, tokFactory);
@@ -188,7 +189,7 @@ namespace Lucene.Net.Analysis.Synonym
         {
             StringReader reader = new StringReader(source);
             TokenStream ts = LoadTokenizer(tokFactory, reader);
-            IList<string> tokList = new List<string>();
+            IList<string> tokList = new JCG.List<string>();
             try
             {
                 ICharTermAttribute termAtt = ts.AddAttribute<ICharTermAttribute>();
@@ -241,7 +242,7 @@ namespace Lucene.Net.Analysis.Synonym
 
         public static IList<string> SplitWS(string s, bool decode)
         {
-            List<string> lst = new List<string>(2);
+            IList<string> lst = new JCG.List<string>(2);
             StringBuilder sb = new StringBuilder();
             int pos = 0, end = s.Length;
             while (pos < end)
@@ -314,7 +315,7 @@ namespace Lucene.Net.Analysis.Synonym
         /// <param name="decode"> decode backslash escaping </param>
         public static IList<string> SplitSmart(string s, string separator, bool decode)
         {
-            List<string> lst = new List<string>(2);
+            IList<string> lst = new JCG.List<string>(2);
             StringBuilder sb = new StringBuilder();
             int pos = 0, end = s.Length;
             while (pos < end)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymMap.cs
index ed61071..51e6e6b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymMap.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Analysis.Util;
 using Lucene.Net.Support;
@@ -7,6 +7,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -143,7 +144,7 @@ namespace Lucene.Net.Analysis.Synonym
         /// </summary>
         public static IList<Token> MakeTokens(IList<string> strings)
         {
-            IList<Token> ret = new List<Token>(strings.Count);
+            IList<Token> ret = new JCG.List<Token>(strings.Count);
             foreach (string str in strings)
             {
                 //Token newTok = new Token(str,0,0,"SYNONYM");
@@ -163,7 +164,7 @@ namespace Lucene.Net.Analysis.Synonym
         /// </summary>
         public static IList<Token> MergeTokens(IList<Token> lst1, IList<Token> lst2)
         {
-            var result = new List<Token>();
+            var result = new JCG.List<Token>();
             if (lst1 == null || lst2 == null)
             {
                 if (lst2 != null)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SolrSynonymParser.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SolrSynonymParser.cs
index 7b6807d..128c449 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SolrSynonymParser.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SolrSynonymParser.cs
@@ -2,9 +2,9 @@
 using J2N.Text;
 using Lucene.Net.Util;
 using System;
-using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -149,7 +149,7 @@ namespace Lucene.Net.Analysis.Synonym
 
         private static string[] Split(string s, string separator)
         {
-            List<string> list = new List<string>(2);
+            JCG.List<string> list = new JCG.List<string>(2);
             StringBuilder sb = new StringBuilder();
             int pos = 0, end = s.Length;
             while (pos < end)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
index 6cbb511..c3f62f6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
@@ -90,7 +90,7 @@ namespace Lucene.Net.Analysis.Synonym
             {
                 internal bool includeOrig;
                 // we could sort for better sharing ultimately, but it could confuse people
-                internal List<int> ords = new List<int>();
+                internal JCG.List<int> ords = new JCG.List<int>();
             }
 
             /// <summary>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
index 8046a81..9f0211d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Analysis.Core;
 using Lucene.Net.Support;
@@ -395,7 +395,7 @@ namespace Lucene.Net.Analysis.Util
                 return Collections.EmptyList<string>();
             }
 
-            IList<string> result = new List<string>();
+            IList<string> result = new JCG.List<string>();
             foreach (string file in SplitFileNameHolder.FILE_SPLIT_PATTERN.Split(fileNames))
             {
                 result.Add(SplitFileNameHolder.FILE_REPLACE_PATTERN.Replace(file, string.Empty));
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
index ee0c46c..0834d0f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N.Text;
 using Lucene.Net.Util;
 using System;
@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Util
 {
@@ -227,7 +228,7 @@ namespace Lucene.Net.Analysis.Util
         /// <exception cref="IOException"> If there is a low-level I/O error. </exception>
         public static IList<string> GetLines(Stream stream, Encoding encoding)
         {
-            List<string> lines = new List<string>();
+            IList<string> lines = new JCG.List<string>();
 
             using (StreamReader reader = new StreamReader(stream, encoding))
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizer.cs
index ca83fb4..a5e0b04 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizer.cs
@@ -6,6 +6,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Wikipedia
 {
@@ -231,7 +232,7 @@ namespace Lucene.Net.Analysis.Wikipedia
             int lastPos = theStart + numAdded;
             int tmpTokType;
             int numSeen = 0;
-            IList<AttributeSource.State> tmp = new List<AttributeSource.State>();
+            IList<AttributeSource.State> tmp = new JCG.List<AttributeSource.State>();
             SetupSavedToken(0, type);
             tmp.Add(CaptureState());
             //while we can get a token and that token is the same type and we have not transitioned to a new wiki-item of the same type
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
index 974d506..7d36901 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
@@ -59,7 +59,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
         {
             string line = null;
             int wordId = CUSTOM_DICTIONARY_WORD_ID_OFFSET;
-            List<string[]> featureEntries = new List<string[]>();
+            JCG.List<string[]> featureEntries = new JCG.List<string[]>();
 
             // text, segmentation, readings, POS
             while ((line = reader.ReadLine()) != null)
@@ -80,8 +80,8 @@ namespace Lucene.Net.Analysis.Ja.Dict
             // the old treemap didn't support this either, and i'm not sure if its needed/useful?
             featureEntries.Sort(Comparer<string[]>.Create((left, right) => left[0].CompareToOrdinal(right[0])));
 
-            List<string> data = new List<string>(featureEntries.Count);
-            List<int[]> segmentations = new List<int[]>(featureEntries.Count);
+            JCG.List<string> data = new JCG.List<string>(featureEntries.Count);
+            JCG.List<int[]> segmentations = new JCG.List<int[]>(featureEntries.Count);
 
             PositiveInt32Outputs fstOutput = PositiveInt32Outputs.Singleton;
             Builder<long?> fstBuilder = new Builder<long?>(Lucene.Net.Util.Fst.FST.INPUT_TYPE.BYTE2, fstOutput);
@@ -179,7 +179,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
         /// <returns>Array of {wordId, index, length}.</returns>
         private static int[][] ToIndexArray(IDictionary<int, int[]> input) // LUCENENET: CA1822: Mark members as static
         {
-            List<int[]> result = new List<int[]>();
+            JCG.List<int[]> result = new JCG.List<int[]>();
             foreach (int i in input.Keys)
             {
                 int[] wordIdAndLength = input[i];
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
index 9843dc7..9568b9a 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
@@ -9,6 +9,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja.Util
 {
@@ -36,7 +37,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         private int targetMapEndOffset = 0, lastWordId = -1, lastSourceId = -1;
         private int[] targetMap = new int[8192];
         private int[] targetMapOffsets = new int[8192];
-        private readonly List<string> posDict = new List<string>();
+        private readonly IList<string> posDict = new JCG.List<string>();
 
         protected BinaryDictionaryWriter(Type implClazz, int size) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
         {
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
index 8562f01..032d1db 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
@@ -8,6 +8,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja.Util
 {
@@ -50,7 +51,7 @@ namespace Lucene.Net.Analysis.Ja.Util
 
         public virtual TokenInfoDictionaryWriter Build(string dirname)
         {
-            List<string> csvFiles = new List<string>();
+            JCG.List<string> csvFiles = new JCG.List<string>();
             foreach (FileInfo file in new DirectoryInfo(dirname).EnumerateFiles("*.csv"))
             {
                 csvFiles.Add(file.FullName);
@@ -65,7 +66,7 @@ namespace Lucene.Net.Analysis.Ja.Util
 
             // all lines in the file
             Console.WriteLine("  parse...");
-            List<string[]> lines = new List<string[]>(400000);
+            JCG.List<string[]> lines = new JCG.List<string[]>(400000);
             foreach (string file in csvFiles)
             {
                 using Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
index 060ce8a..3fde184 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
@@ -6,6 +6,7 @@ using System.Globalization;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja.Util
 {
@@ -53,7 +54,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         {
             UnknownDictionaryWriter dictionary = new UnknownDictionaryWriter(5 * 1024 * 1024);
 
-            List<string[]> lines = new List<string[]>();
+            JCG.List<string[]> lines = new JCG.List<string[]>();
             Encoding decoder = Encoding.GetEncoding(encoding);
             using (Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
             using (TextReader reader = new StreamReader(inputStream, decoder))
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Util/CSVUtil.cs b/src/Lucene.Net.Analysis.Kuromoji/Util/CSVUtil.cs
index f9e939c..d982a70 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Util/CSVUtil.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Util/CSVUtil.cs
@@ -1,8 +1,8 @@
 using Lucene.Net.Support;
 using System;
-using System.Collections.Generic;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja.Util
 {
@@ -46,7 +46,7 @@ namespace Lucene.Net.Analysis.Ja.Util
         public static string[] Parse(string line)
         {
             bool insideQuote = false;
-            List<string> result = new List<string>();
+            JCG.List<string> result = new JCG.List<string>();
             int quoteCount = 0;
             StringBuilder sb = new StringBuilder();
             for (int i = 0; i < line.Length; i++)
diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs
index b2f0bd1..01d6d92 100644
--- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs
+++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilter.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Analysis.Morfologik
 
             this.input = input;
             this.stemmer = new DictionaryLookup(dict);
-            this.lemmaList = new List<WordData>();
+            this.lemmaList = new JCG.List<WordData>();
         }
 
         /// <summary>
@@ -180,7 +180,7 @@ namespace Lucene.Net.Analysis.Morfologik
         public override void Reset()
         {
             lemmaListIndex = 0;
-            lemmaList = new List<WordData>();
+            lemmaList = new JCG.List<WordData>();
             tagsList.Clear();
             base.Reset();
         }
diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs
index 5dc9834..1b0c778 100644
--- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs
+++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Util;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Morfologik.TokenAttributes
 {
@@ -76,10 +77,10 @@ namespace Lucene.Net.Analysis.Morfologik.TokenAttributes
 
         public override void CopyTo(IAttribute target)
         {
-            List<StringBuilder> cloned = null;
+            IList<StringBuilder> cloned = null;
             if (tags != null)
             {
-                cloned = new List<StringBuilder>(tags.Count);
+                cloned = new JCG.List<StringBuilder>(tags.Count);
                 foreach (StringBuilder b in tags)
                 {
                     cloned.Add(new StringBuilder(b.ToString()));
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilter.cs
index 0dacec4..447f9ef 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilter.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilter.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Analysis.OpenNlp.Tools;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.OpenNlp
 {
@@ -29,7 +30,7 @@ namespace Lucene.Net.Analysis.OpenNlp
     /// </summary>
     public sealed class OpenNLPChunkerFilter : TokenFilter
     {
-        private readonly IList<AttributeSource> sentenceTokenAttrs = new List<AttributeSource>();
+        private readonly IList<AttributeSource> sentenceTokenAttrs = new JCG.List<AttributeSource>();
         private int tokenNum = 0;
         private bool moreTokensAvailable = true;
         private string[] sentenceTerms = null;
@@ -74,8 +75,8 @@ namespace Lucene.Net.Analysis.OpenNlp
 
         private void NextSentence()
         {
-            var termList = new List<string>();
-            var posTagList = new List<string>();
+            var termList = new JCG.List<string>();
+            var posTagList = new JCG.List<string>();
             sentenceTokenAttrs.Clear();
             bool endOfSentence = false;
             while (!endOfSentence && (moreTokensAvailable = m_input.IncrementToken()))
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs
index a15b6f8..d9568a7 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Analysis.OpenNlp.Tools;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.OpenNlp
 {
@@ -41,7 +42,7 @@ namespace Lucene.Net.Analysis.OpenNlp
         private readonly ITypeAttribute typeAtt;
         private readonly IKeywordAttribute keywordAtt;
         private readonly IFlagsAttribute flagsAtt;
-        private readonly IList<AttributeSource> sentenceTokenAttrs = new List<AttributeSource>(); // LUCENENET: marked readonly
+        private readonly IList<AttributeSource> sentenceTokenAttrs = new JCG.List<AttributeSource>(); // LUCENENET: marked readonly
         private IEnumerator<AttributeSource> sentenceTokenAttrsIter = null;
         private bool moreTokensAvailable = true;
         private string[] sentenceTokens = null;     // non-keyword tokens
@@ -91,8 +92,8 @@ namespace Lucene.Net.Analysis.OpenNlp
 
         private void NextSentence()
         {
-            var tokenList = new List<string>();
-            var typeList = new List<string>();
+            var tokenList = new JCG.List<string>();
+            var typeList = new JCG.List<string>();
             sentenceTokenAttrs.Clear();
             bool endOfSentence = false;
             while (!endOfSentence && (moreTokensAvailable = m_input.IncrementToken()))
diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs
index 3e7e200..f80ca6f 100644
--- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs
+++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Analysis.OpenNlp.Tools;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.OpenNlp
 {
@@ -28,7 +29,7 @@ namespace Lucene.Net.Analysis.OpenNlp
     /// </summary>
     public sealed class OpenNLPPOSFilter : TokenFilter
     {
-        private readonly IList<AttributeSource> sentenceTokenAttrs = new List<AttributeSource>();
+        private readonly IList<AttributeSource> sentenceTokenAttrs = new JCG.List<AttributeSource>();
         private string[] tags = null;
         private int tokenNum = 0;
         private bool moreTokensAvailable = true;
@@ -73,7 +74,7 @@ namespace Lucene.Net.Analysis.OpenNlp
 
         private string[] NextSentence()
         {
-            var termList = new List<string>();
+            var termList = new JCG.List<string>();
             sentenceTokenAttrs.Clear();
             bool endOfSentence = false;
             while (!endOfSentence && (moreTokensAvailable = m_input.IncrementToken()))
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
index 201cd97..e339654 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs
@@ -146,7 +146,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
         /// <returns>A Lang encapsulating the loaded language-guessing rules.</returns>
         public static Lang LoadFromResource(string languageRulesResourceName, Languages languages)
         {
-            IList<LangRule> rules = new List<LangRule>();
+            IList<LangRule> rules = new JCG.List<LangRule>();
             Stream lRulesIS = typeof(Lang).FindAndGetManifestResourceStream(languageRulesResourceName);
 
             if (lRulesIS == null)
@@ -249,7 +249,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                 {
                     if (rule.acceptOnMatch)
                     {
-                        List<string> toRemove = new List<string>();
+                        IList<string> toRemove = new JCG.List<string>();
                         foreach (var item in langs)
                         {
                             if (!rule.languages.Contains(item))
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
index 03116bd..3dbd4bd 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
@@ -287,7 +287,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                                      LanguageSet langs)
         {
             IDictionary<string, IList<Rule>> ruleMap = GetInstanceMap(nameType, rt, langs);
-            IList<Rule> allRules = new List<Rule>();
+            IList<Rule> allRules = new JCG.List<Rule>();
             foreach (IList<Rule> rules in ruleMap.Values)
             {
                 allRules.AddRange(rules);
@@ -379,7 +379,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                     throw new ArgumentException("Phoneme starts with '(' so must end with ')'");
                 }
 
-                IList<Phoneme> phs = new List<Phoneme>();
+                IList<Phoneme> phs = new JCG.List<Phoneme>();
                 string body = ph.Substring(1, (ph.Length - 1) - 1);
                 foreach (string part in PIPE.Split(body).TrimEnd())
                 {
@@ -502,7 +502,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                                         string patternKey = r.pattern.Substring(0, 1 - 0);
                                         if (!lines.TryGetValue(patternKey, out IList<Rule> rules) || rules == null)
                                         {
-                                            rules = new List<Rule>();
+                                            rules = new JCG.List<Rule>();
                                             lines[patternKey] = rules;
                                         }
                                         rules.Add(r);
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
index f8f8ee8..8c229f1 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs
@@ -9,6 +9,7 @@ using System.IO;
 using System.Runtime.CompilerServices;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Phonetic.Language
 {
@@ -358,7 +359,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                                 char patternKey = r.Pattern[0];
                                 if (!ruleMapping.TryGetValue(patternKey, out IList<Rule> rules) || rules == null)
                                 {
-                                    rules = new List<Rule>();
+                                    rules = new JCG.List<Rule>();
                                     ruleMapping[patternKey] = rules;
                                 }
                                 rules.Add(r);
@@ -509,7 +510,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
             string input = Cleanup(source);
 
             // LinkedHashSet preserves input order. In .NET we can use List for that purpose.
-            IList<Branch> currentBranches = new List<Branch>
+            IList<Branch> currentBranches = new JCG.List<Branch>
             {
                 new Branch()
             };
@@ -532,7 +533,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language
                 }
 
                 // use an EMPTY_LIST to avoid false positive warnings wrt potential null pointer access
-                IList<Branch> nextBranches = branching ? new List<Branch>() : Collections.EmptyList<Branch>() as IList<Branch>;
+                IList<Branch> nextBranches = branching ? new JCG.List<Branch>() : Collections.EmptyList<Branch>() as IList<Branch>;
 
                 foreach (Rule rule in rules)
                 {
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs
index 94dd645..57ce4bf 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs
@@ -154,7 +154,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             int to = tokenPair.To;
             if (!IsToExist(to))
             {
-                List<SegTokenPair> newlist = new List<SegTokenPair>
+                IList<SegTokenPair> newlist = new JCG.List<SegTokenPair>
                 {
                     tokenPair
                 };
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs
index a332311..a959d4c 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs
@@ -1,6 +1,7 @@
 // lucene version compatibility level: 4.8.1
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 {
@@ -70,7 +71,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <returns>a <see cref="T:IList{SegToken}"/> of these ordered tokens.</returns>
         public virtual IList<SegToken> MakeIndex()
         {
-            IList<SegToken> result = new List<SegToken>();
+            IList<SegToken> result = new JCG.List<SegToken>();
             int s = -1, count = 0, size = tokenListTable.Count;
             IList<SegToken> tokenList;
             int index = 0;
@@ -101,7 +102,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
             int s = token.StartOffset;
             if (!IsStartExist(s))
             {
-                List<SegToken> newlist = new List<SegToken>
+                IList<SegToken> newlist = new JCG.List<SegToken>
                 {
                     token
                 };
@@ -124,7 +125,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         /// <returns><see cref="T:IList{SegToken}"/> of all tokens in the map.</returns>
         public virtual IList<SegToken> ToTokenList()
         {
-            IList<SegToken> result = new List<SegToken>();
+            IList<SegToken> result = new JCG.List<SegToken>();
             int s = -1, count = 0, size = tokenListTable.Count;
             IList<SegToken> tokenList;
 
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
index 9f5da0d..a42b1f5 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -110,7 +111,7 @@ namespace Egothor.Stemmer
             }
             // LUCENENET specific - reformatted with : and changed "charset" to "encoding"
             string charset = SystemProperties.GetProperty("egothor:stemmer:encoding", "UTF-8");
-            var stemmerTables = new List<string>();
+            var stemmerTables = new JCG.List<string>();
 
             // LUCENENET specific
             // command line argument overrides environment variable or default, if supplied
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs
index b6ecc42..9a48e1c 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs
@@ -1,12 +1,11 @@
 using J2N.Text;
 using Lucene.Net.Util;
 using System;
-using System.Collections.Generic;
-using System.Globalization;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
 using Integer = J2N.Numerics.Int32;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -99,7 +98,7 @@ namespace Egothor.Stemmer
             int nop = Get(3, args[0]);
             // LUCENENET specific - reformatted with : and changed "charset" to "encoding"
             string charset = SystemProperties.GetProperty("egothor:stemmer:encoding", "UTF-8");
-            var stemmerTables = new List<string>();
+            var stemmerTables = new JCG.List<string>();
 
             // LUCENENET specific
             // command line argument overrides environment variable or default, if supplied
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs
index 05a3213..605ba88 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs
@@ -1,5 +1,6 @@
 using Lucene.Net.Support;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -91,7 +92,7 @@ namespace Egothor.Stemmer
             }
 
             Arrays.Fill(remap, -1);
-            rows = RemoveGaps(orig.root, orows, new List<Row>(), remap);
+            rows = RemoveGaps(orig.root, orows, new JCG.List<Row>(), remap);
 
             return new Trie(orig.forward, remap[orig.root], cmds, rows);
         }
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs
index d5d2282..1b5171b 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs
@@ -1,5 +1,6 @@
 using Lucene.Net.Support;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -100,7 +101,7 @@ namespace Egothor.Stemmer
             }
 
             Arrays.Fill(remap, -1);
-            rows = RemoveGaps(orig.root, orows, new List<Row>(), remap);
+            rows = RemoveGaps(orig.root, orows, new JCG.List<Row>(), remap);
 
             return new Trie(orig.forward, remap[orig.root], cmds, rows);
         }
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
index ab4bef6..36fb20a 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -70,7 +71,7 @@ namespace Egothor.Stemmer
         internal static char EOM = '*';
         internal static string EOM_NODE = "" + EOM;
 
-        protected IList<Trie> m_tries = new List<Trie>();
+        protected IList<Trie> m_tries = new JCG.List<Trie>();
 
         private readonly int BY = 1; // LUCENENET: marked readonly
 
@@ -189,7 +190,7 @@ namespace Egothor.Stemmer
         /// <returns>the newly reduced Trie</returns>
         public override Trie Reduce(Reduce by)
         {
-            List<Trie> h = new List<Trie>();
+            IList<Trie> h = new JCG.List<Trie>();
             foreach (Trie trie in m_tries)
                 h.Add(trie.Reduce(by));
 
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
index 38c95c1..aa686a0 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -339,7 +340,7 @@ namespace Egothor.Stemmer
         /// <returns>the newly reduced Trie</returns>
         public override Trie Reduce(Reduce by)
         {
-            List<Trie> h = new List<Trie>();
+            IList<Trie> h = new JCG.List<Trie>();
             foreach (Trie trie in m_tries)
                 h.Add(trie.Reduce(by));
 
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Optimizer.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Optimizer.cs
index 29673c4..18836d3 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Optimizer.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Optimizer.cs
@@ -1,5 +1,6 @@
 using Lucene.Net.Support;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -81,7 +82,7 @@ namespace Egothor.Stemmer
         public override Trie Optimize(Trie orig)
         {
             IList<string> cmds = orig.cmds;
-            IList<Row> rows = new List<Row>();
+            IList<Row> rows = new JCG.List<Row>();
             IList<Row> orows = orig.rows;
             int[] remap = new int[orows.Count];
 
@@ -111,7 +112,7 @@ namespace Egothor.Stemmer
 
             int root = remap[orig.root];
             Arrays.Fill(remap, -1);
-            rows = RemoveGaps(root, rows, new List<Row>(), remap);
+            rows = RemoveGaps(root, rows, new JCG.List<Row>(), remap);
 
             return new Trie(orig.forward, remap[root], cmds, rows);
         }
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Reduce.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Reduce.cs
index 3b95735..7e85900 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Reduce.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Reduce.cs
@@ -1,5 +1,6 @@
 using Lucene.Net.Support;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -77,12 +78,12 @@ namespace Egothor.Stemmer
         public virtual Trie Optimize(Trie orig)
         {
             IList<string> cmds = orig.cmds;
-            IList<Row> rows = new List<Row>();
+            IList<Row> rows = new JCG.List<Row>();
             IList<Row> orows = orig.rows;
             int[] remap = new int[orows.Count];
 
             Arrays.Fill(remap, -1);
-            rows = RemoveGaps(orig.root, rows, new List<Row>(), remap);
+            rows = RemoveGaps(orig.root, rows, new JCG.List<Row>(), remap);
 
             return new Trie(orig.forward, remap[orig.root], cmds, rows);
         }
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs
index f438c3c..92a5824 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs
@@ -1,6 +1,7 @@
 using J2N.IO;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 /*
                     Egothor Software License version 1.00
@@ -70,8 +71,8 @@ namespace Egothor.Stemmer
     /// </summary>
     public class Trie
     {
-        internal IList<Row> rows = new List<Row>();
-        internal IList<string> cmds = new List<string>();
+        internal IList<Row> rows = new JCG.List<Row>();
+        internal IList<string> cmds = new JCG.List<string>();
         internal int root;
 
         internal bool forward = false;
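
The Trie hunk above is representative of the whole sweep: add a JCG alias for J2N.Collections.Generic, keep declarations typed as the IList<T> interface from System.Collections.Generic, and construct JCG.List<T> behind it. A minimal standalone sketch of that shape (type and member names are placeholders, not taken from the patch):

    using System.Collections.Generic;      // still needed for IList<T>
    using JCG = J2N.Collections.Generic;   // alias used throughout this patch

    public class RowStore                  // placeholder type, not from the patch
    {
        // field declared as the interface, instantiated with the J2N implementation
        internal IList<string> rows = new JCG.List<string>();

        public IList<string> Uppercased()
        {
            IList<string> result = new JCG.List<string>();
            foreach (string r in rows)
                result.Add(r.ToUpperInvariant());
            return result;
        }
    }
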
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs
index b27faee..08df36a 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs
@@ -10,6 +10,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -93,7 +94,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
                 DocMaker.BODY_FIELD, a);
-            IList<Query> queries = new List<Query>();
+            IList<Query> queries = new JCG.List<Query>();
             for (int i = 0; i < qs.Count; i++)
             {
                 try
@@ -134,7 +135,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             // analyzer (default is standard analyzer)
             Analyzer anlzr = NewAnalyzerTask.CreateAnalyzer(m_config.Get("analyzer", typeof(StandardAnalyzer).AssemblyQualifiedName));
 
-            List<object> queryList = new List<object>(20);
+            JCG.List<object> queryList = new JCG.List<object>(20);
             queryList.AddRange(STANDARD_QUERIES);
             if (!m_config.Get("enwikiQueryMaker.disableSpanQueries", false))
                 queryList.AddRange(GetPrebuiltQueries(DocMaker.BODY_FIELD));
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/FileBasedQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/FileBasedQueryMaker.cs
index ad94116..bbbdf3a 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/FileBasedQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/FileBasedQueryMaker.cs
@@ -5,10 +5,10 @@ using Lucene.Net.QueryParsers.Classic;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
-using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -61,7 +61,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 defaultField, anlzr);
             qp.AllowLeadingWildcard = true;
 
-            List<Query> qq = new List<Query>();
+            JCG.List<Query> qq = new JCG.List<Query>();
             string fileName = m_config.Get("file.query.maker.file", null);
             if (fileName != null)
             {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs
index 13a78bb..1f1362c 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -40,7 +41,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         // LUCENENET specific: DateFormatInfo not used
 
         private DirectoryInfo dataDir = null;
-        private readonly List<FileInfo> inputFiles = new List<FileInfo>(); // LUCENENET: marked readonly
+        private readonly IList<FileInfo> inputFiles = new JCG.List<FileInfo>(); // LUCENENET: marked readonly
         private int nextFile = 0;
         private int iteration = 0;
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs
index 126d9b8..6982a80 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs
@@ -9,6 +9,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -74,7 +75,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
                 DocMaker.BODY_FIELD, a);
-            List<Query> queries = new List<Query>();
+            JCG.List<Query> queries = new JCG.List<Query>();
             for (int i = 0; i < qs.Count; i++)
             {
                 try
@@ -116,7 +117,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             Analyzer anlzr = NewAnalyzerTask.CreateAnalyzer(m_config.Get("analyzer",
                 typeof(Lucene.Net.Analysis.Standard.StandardAnalyzer).AssemblyQualifiedName));
 
-            List<object> queryList = new List<object>(20);
+            JCG.List<object> queryList = new JCG.List<object>(20);
             queryList.AddRange(STANDARD_QUERIES);
             queryList.AddRange(GetPrebuiltQueries(DocMaker.BODY_FIELD));
             return CreateQueries(queryList, anlzr);
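
ReutersQueryMaker above keeps queryList as the concrete JCG.List<object> rather than widening it to IList<object>, since the local needs list-specific members such as AddRange before the result is handed off. A rough sketch of that choice, under the assumption that JCG.List<T> mirrors the familiar List<T> instance surface (names are placeholders):

    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    public static class QueryListSketch    // placeholder, not from the patch
    {
        public static IList<string> Build(IEnumerable<string> standard, IEnumerable<string> extra)
        {
            // kept as the concrete JCG.List<T> locally so AddRange is available
            JCG.List<string> all = new JCG.List<string>(20);
            all.AddRange(standard);
            all.AddRange(extra);
            return all;                     // exposed to callers as IList<string>
        }
    }
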
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs
index fa14e8b..3f651fb 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs
@@ -5,7 +5,7 @@ using Lucene.Net.QueryParsers.Classic;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -49,7 +49,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
                 LuceneVersion.LUCENE_CURRENT,
 #pragma warning restore 612, 618
                 DocMaker.BODY_FIELD, anlzr);
-            List<Query> qq = new List<Query>();
+            JCG.List<Query> qq = new JCG.List<Query>();
             Query q1 = new TermQuery(new Term(DocMaker.ID_FIELD, "doc2"));
             qq.Add(q1);
             Query q2 = new TermQuery(new Term(DocMaker.BODY_FIELD, "simple"));
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleSloppyPhraseQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleSloppyPhraseQueryMaker.cs
index 571fef0..62de470 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleSloppyPhraseQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleSloppyPhraseQueryMaker.cs
@@ -1,7 +1,7 @@
 using J2N.Text;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -32,7 +32,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
         {
             // extract some 100 words from doc text to an array
             string[] words;
-            List<string> w = new List<string>();
+            JCG.List<string> w = new JCG.List<string>();
             StringTokenizer st = new StringTokenizer(SingleDocSource.DOC_TEXT);
             while (st.MoveNext() && w.Count < 100)
             {
@@ -41,7 +41,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             words = w.ToArray();
 
             // create queries (that would find stuff) with varying slops
-            List<Query> queries = new List<Query>();
+            JCG.List<Query> queries = new JCG.List<Query>();
             for (int slop = 0; slop < 8; slop++)
             {
                 for (int qlen = 2; qlen < 6; qlen++)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs
index 9d12160..a2e469e 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Spatial;
 using Lucene.Net.Spatial.Queries;
 using Spatial4n.Core.Shapes;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -64,7 +65,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
             srcConfig.Set("line.parser", m_config.Get("query.file.line.parser", null));
             srcConfig.Set("content.source.forever", "false");
 
-            List<Query> queries = new List<Query>();
+            JCG.List<Query> queries = new JCG.List<Query>();
             LineDocSource src = new LineDocSource();
             try
             {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
index a12d658..84c6329 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs
@@ -8,6 +8,7 @@ using System.Globalization;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Feeds
 {
@@ -87,7 +88,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
 
         private readonly DisposableThreadLocal<StringBuilder> trecDocBuffer = new DisposableThreadLocal<StringBuilder>();
         private DirectoryInfo dataDir = null;
-        private readonly List<FileInfo> inputFiles = new List<FileInfo>();
+        private readonly IList<FileInfo> inputFiles = new JCG.List<FileInfo>();
         private int nextFile = 0;
         // Use to synchronize threads on reading from the TREC documents.
         private readonly object @lock = new object(); // LUCENENET: marked readonly
diff --git a/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs b/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs
index 2b6c50a..989f0b1 100644
--- a/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs
@@ -14,6 +14,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask
 {
@@ -148,7 +149,7 @@ namespace Lucene.Net.Benchmarks.ByTask
                           docMaker, facetSource, contentSource);
 
                 // close all perf objects that are closeable.
-                List<IDisposable> perfObjectsToClose = new List<IDisposable>();
+                IList<IDisposable> perfObjectsToClose = new JCG.List<IDisposable>();
                 foreach (object obj in perfObjects.Values)
                 {
                     if (obj is IDisposable disposable)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs
index ec76aa7..d0a9394 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Benchmarks.ByTask.Utils;
 using Lucene.Net.Support.Threading;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Stats
 {
@@ -30,7 +31,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Stats
         // stat points ordered by their start time. 
         // for now we collect points as TaskStats objects.
         // later might optimize to collect only native data.
-        private readonly List<TaskStats> points = new List<TaskStats>(); // LUCENENET: marked readonly
+        private readonly IList<TaskStats> points = new JCG.List<TaskStats>(); // LUCENENET: marked readonly
 
         private int nextTaskRunNum = 0;
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AddFacetedDocTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/AddFacetedDocTask.cs
index 6ae761c..4f76112 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AddFacetedDocTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AddFacetedDocTask.cs
@@ -1,6 +1,7 @@
 using Lucene.Net.Benchmarks.ByTask.Feeds;
 using Lucene.Net.Facet;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -81,7 +82,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         {
             if (config != null)
             {
-                List<FacetField> facets = new List<FacetField>();
+                IList<FacetField> facets = new JCG.List<FacetField>();
                 RunData.FacetSource.GetNextFacets(facets);
                 foreach (FacetField ff in facets)
                 {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
index cd3e6ac..ef619c1 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
@@ -6,8 +6,8 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
-using System.Reflection;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -82,9 +82,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         private string factoryName = null;
         private int? positionIncrementGap = null;
         private int? offsetGap = null;
-        private readonly IList<CharFilterFactory> charFilterFactories = new List<CharFilterFactory>();
+        private readonly IList<CharFilterFactory> charFilterFactories = new JCG.List<CharFilterFactory>();
         private TokenizerFactory tokenizerFactory = null;
-        private readonly IList<TokenFilterFactory> tokenFilterFactories = new List<TokenFilterFactory>();
+        private readonly IList<TokenFilterFactory> tokenFilterFactories = new JCG.List<TokenFilterFactory>();
 
         public AnalyzerFactoryTask(PerfRunData runData)
             : base(runData)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs
index 979bc88..0d9af88 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -36,7 +37,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         public NewAnalyzerTask(PerfRunData runData)
             : base(runData)
         {
-            analyzerNames = new List<string>();
+            analyzerNames = new JCG.List<string>();
         }
 
         public static Analyzer CreateAnalyzer(string className)
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs
index 43bd6f9..640eab7 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs
@@ -1,13 +1,13 @@
 using J2N.Threading;
 using Lucene.Net.Benchmarks.ByTask.Feeds;
 using Lucene.Net.Benchmarks.ByTask.Stats;
-using Lucene.Net.Support.Threading;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.Text;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Tasks
 {
@@ -61,7 +61,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             SetSequenceName();
             this.parent = parent;
             this.parallel = parallel;
-            tasks = new List<PerfTask>();
+            tasks = new JCG.List<PerfTask>();
             logByTimeMsec = runData.Config.Get("report.time.step.msec", 0);
         }
 
@@ -188,7 +188,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
             int count = 0;
 
             long runTime = (long)(runTimeSec * 1000);
-            List<RunBackgroundTask> bgTasks = null;
+            IList<RunBackgroundTask> bgTasks = null;
 
             long t0 = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
             for (int k = 0; fixedTime || (repetitions == REPEAT_EXHAUST && !exhausted) || k < repetitions; k++)
@@ -204,7 +204,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                     {
                         if (bgTasks == null)
                         {
-                            bgTasks = new List<RunBackgroundTask>();
+                            bgTasks = new JCG.List<RunBackgroundTask>();
                         }
                         RunBackgroundTask bgTask = new RunBackgroundTask(task, letChildReport);
                         bgTask.Priority = (task.BackgroundDeltaPriority + Thread.CurrentThread.Priority);
@@ -617,7 +617,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         public override object Clone()
         {
             TaskSequence res = (TaskSequence)base.Clone();
-            res.tasks = new List<PerfTask>();
+            res.tasks = new JCG.List<PerfTask>();
             for (int i = 0; i < tasks.Count; i++)
             {
                 res.tasks.Add((PerfTask)tasks[i].Clone());
diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
index c965400..c7d4a52 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
@@ -433,7 +433,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
         /// <returns>All tasks participating in this algorithm.</returns>
         public virtual IList<PerfTask> ExtractTasks()
         {
-            List<PerfTask> res = new List<PerfTask>();
+            IList<PerfTask> res = new JCG.List<PerfTask>();
             ExtractTasks(res, sequence);
             return res;
         }
diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs
index 2deee0b..cfaa427 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs
@@ -7,6 +7,7 @@ using System.Globalization;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.ByTask.Utils
 {
@@ -64,7 +65,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
         public Config(TextReader algReader)
         {
             // read alg file to array of lines
-            IList<string> lines = new List<string>();
+            IList<string> lines = new JCG.List<string>();
             int lastConfigLine = 0;
             string line;
             while ((line = algReader.ReadLine()) != null)
@@ -141,7 +142,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
         private void PrintProps()
         {
             Console.WriteLine("------------> config properties:");
-            List<string> propKeys = new List<string>(props.Keys);
+            IList<string> propKeys = new JCG.List<string>(props.Keys);
             propKeys.Sort();
             foreach (string propName in propKeys)
             {
@@ -381,7 +382,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
                 return new string[] { s };
             }
 
-            List<string> a = new List<string>();
+            JCG.List<string> a = new JCG.List<string>();
             StringTokenizer st = new StringTokenizer(s, ":");
             while (st.MoveNext())
             {
@@ -399,7 +400,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
                 return new int[] { int.Parse(s, CultureInfo.InvariantCulture) };
             }
 
-            List<int> a = new List<int>();
+            IList<int> a = new JCG.List<int>();
             StringTokenizer st = new StringTokenizer(s, ":");
             while (st.MoveNext())
             {
@@ -422,7 +423,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
                 return new double[] { double.Parse(s, CultureInfo.InvariantCulture) };
             }
 
-            List<double> a = new List<double>();
+            IList<double> a = new JCG.List<double>();
             StringTokenizer st = new StringTokenizer(s, ":");
             while (st.MoveNext())
             {
@@ -445,7 +446,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
                 return new bool[] { bool.Parse(s) };
             }
 
-            List<bool> a = new List<bool>();
+            IList<bool> a = new JCG.List<bool>();
             StringTokenizer st = new StringTokenizer(s, ":");
             while (st.MoveNext())
             {
diff --git a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
index f24db9f..5fd4c69 100644
--- a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
+++ b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
@@ -2,9 +2,9 @@
 using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.Globalization;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.Quality
 {
@@ -74,7 +74,7 @@ namespace Lucene.Net.Benchmarks.Quality
         {
             this.maxGoodPoints = maxGoodPoints;
             this.searchTime = searchTime;
-            this.recallPoints = new List<RecallPoint>();
+            this.recallPoints = new JCG.List<RecallPoint>();
             pAt = new double[MAX_POINTS + 1]; // pAt[0] unused. 
         }
 
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs b/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs
index 770b44a..6446601 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs
@@ -2,6 +2,7 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.Quality.Trec
 {
@@ -55,7 +56,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec
         /// <exception cref="IOException">if cannot read the queries.</exception>
         public virtual QualityQuery[] ReadQueries(TextReader reader)
         {
-            IList<QualityQuery> res = new List<QualityQuery>();
+            IList<QualityQuery> res = new JCG.List<QualityQuery>();
             string line;
             try
             {
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
index 5d6a2c1..7d79c85 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
@@ -132,7 +132,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec
         public virtual bool ValidateData(QualityQuery[] qq, TextWriter logger)
         {
             IDictionary<string, QRelJudgement> missingQueries = new Dictionary<string, QRelJudgement>(judgements);
-            IList<string> missingJudgements = new List<string>();
+            IList<string> missingJudgements = new JCG.List<string>();
             for (int i = 0; i < qq.Length; i++)
             {
                 string id = qq[i].QueryID;
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs
index 6470750..842f6be 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.Quality.Trec
 {
@@ -65,7 +66,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec
         /// <exception cref="IOException">if cannot read the queries.</exception>
         public virtual QualityQuery[] ReadQueries(TextReader reader)
         {
-            IList<QualityQuery> res = new List<QualityQuery>();
+            IList<QualityQuery> res = new JCG.List<QualityQuery>();
             StringBuilder sb;
             try
             {
diff --git a/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs b/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs
index 395d2bf..1b7a879 100644
--- a/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Search;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Benchmarks.Quality.Utils
 {
@@ -48,7 +49,7 @@ namespace Lucene.Net.Benchmarks.Quality.Utils
         /// <exception cref="IOException">if cannot extract the doc name from the index.</exception>
         public virtual string DocName(IndexSearcher searcher, int docid)
         {
-            IList<string> name = new List<string>();
+            IList<string> name = new JCG.List<string>();
             searcher.IndexReader.Document(docid, new StoredFieldVisitorAnonymousClass(this, name));
 
             return name.Count > 0 ? name[0] : null;
diff --git a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs
index 0ab40c7..26b9c23 100644
--- a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs
+++ b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs
@@ -9,6 +9,7 @@ using Lucene.Net.Support;
 using System;
 using System.Collections;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Sax.Helpers
 {
@@ -586,7 +587,7 @@ namespace Sax.Helpers
                 }
                 if (declarations == null)
                 {
-                    declarations = new List<string>();
+                    declarations = new JCG.List<string>();
                 }
 
                 prefix = prefix.Intern();
diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs
index 87c239c..ec909e9 100644
--- a/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs
+++ b/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs
@@ -22,9 +22,9 @@ using Sax.Ext;
 using Sax.Helpers;
 using System;
 using System.Collections;
-using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace TagSoup
 {
@@ -1093,7 +1093,7 @@ namespace TagSoup
             {
                 return Arrays.Empty<string>();
             }
-            var l = new List<string>();
+            var l = new JCG.List<string>();
             int s = 0;
             int e; // LUCENENET: IDE0059: Remove unnecessary value assignment
             bool sq = false; // single quote
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
index 44db7ec..d3a5e80 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Store;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
@@ -84,7 +85,7 @@ namespace Lucene.Net.Codecs.BlockTerms
             }
         }
 
-        private readonly IList<FieldMetaData> fields = new List<FieldMetaData>();
+        private readonly IList<FieldMetaData> fields = new JCG.List<FieldMetaData>();
 
         // private final String segment;
 
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
index e1daa47..db8f292 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
@@ -5,7 +5,7 @@ using Lucene.Net.Util;
 using Lucene.Net.Util.Packed;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
@@ -51,7 +51,7 @@ namespace Lucene.Net.Codecs.BlockTerms
 
         private readonly int termIndexInterval;
 
-        private readonly IList<SimpleFieldWriter> fields = new List<SimpleFieldWriter>();
+        private readonly IList<SimpleFieldWriter> fields = new JCG.List<SimpleFieldWriter>();
 
         //private readonly FieldInfos fieldInfos; // unread  // LUCENENET: Not used
 
diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs
index 2a43b5f..8792881 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.BlockTerms
 {
@@ -47,7 +48,7 @@ namespace Lucene.Net.Codecs.BlockTerms
         internal const int VERSION_CHECKSUM = 2;
         internal const int VERSION_CURRENT = VERSION_CHECKSUM;
 
-        private readonly IList<FSTFieldWriter> fields = new List<FSTFieldWriter>();
+        private readonly IList<FSTFieldWriter> fields = new JCG.List<FSTFieldWriter>();
 
         //private readonly FieldInfos fieldInfos; // unread  // LUCENENET: Not used
         private readonly IndexTermSelector policy;
diff --git a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
index 003f72e..28908e3 100644
--- a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
@@ -406,7 +406,7 @@ namespace Lucene.Net.Codecs.Bloom
                 {
                     _delegateFieldsConsumer.Dispose();
                     // Now we are done accumulating values for these fields
-                    var nonSaturatedBlooms = new List<KeyValuePair<FieldInfo, FuzzySet>>();
+                    var nonSaturatedBlooms = new JCG.List<KeyValuePair<FieldInfo, FuzzySet>>();
 
                     foreach (var entry in _bloomFilters)
                     {
diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs
index 211fdcc..a02f02e 100644
--- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs
+++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
-using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -24,7 +24,6 @@ namespace Lucene.Net.Codecs.Memory
 
     using BytesRef = Util.BytesRef;
     using FieldInfo = Index.FieldInfo;
-    using FieldInfos = Index.FieldInfos;
     using FST = FST;
     using IndexFileNames = Index.IndexFileNames;
     using IndexOptions = Index.IndexOptions;
@@ -149,7 +148,7 @@ namespace Lucene.Net.Codecs.Memory
 
         private readonly PostingsWriterBase postingsWriter;
         //private readonly FieldInfos fieldInfos; // LUCENENET: Never read
-        private readonly IList<FieldMetaData> _fields = new List<FieldMetaData>();
+        private readonly IList<FieldMetaData> _fields = new JCG.List<FieldMetaData>();
 #pragma warning disable CA2213 // Disposable fields should be disposed
         private IndexOutput blockOut = null;
         private readonly IndexOutput indexOut = null; // LUCENENET: marked readonly
diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs
index 5501172..3addf98 100644
--- a/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs
+++ b/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
-using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Memory
 {
@@ -24,7 +24,6 @@ namespace Lucene.Net.Codecs.Memory
 
     using BytesRef = Util.BytesRef;
     using FieldInfo = Index.FieldInfo;
-    using FieldInfos = Index.FieldInfos;
     using FST = Util.Fst.FST;
     using IndexFileNames = Index.IndexFileNames;
     using IndexOptions = Index.IndexOptions;
@@ -126,7 +125,7 @@ namespace Lucene.Net.Codecs.Memory
 #pragma warning disable CA2213 // Disposable fields should be disposed
         private IndexOutput _output;
 #pragma warning restore CA2213 // Disposable fields should be disposed
-        private readonly IList<FieldMetaData> _fields = new List<FieldMetaData>();
+        private readonly IList<FieldMetaData> _fields = new JCG.List<FieldMetaData>();
 
         public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter)
         {
diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs
index 00550f4..04d4e79 100644
--- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs
+++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs
@@ -3,7 +3,7 @@ using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 using System.Collections.Generic;
-using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Pulsing
 {
@@ -53,7 +53,7 @@ namespace Lucene.Net.Codecs.Pulsing
 
         private readonly SegmentWriteState _segmentState;
         //private IndexOutput _termsOut; // LUCENENET: Never read
-        private readonly List<FieldMetaData> _fields;
+        private readonly IList<FieldMetaData> _fields;
         private IndexOptions _indexOptions;
         private bool _storePayloads;
 
@@ -126,7 +126,7 @@ namespace Lucene.Net.Codecs.Pulsing
             {
                 _pending[i] = new Position();
             }
-            _fields = new List<FieldMetaData>();
+            _fields = new JCG.List<FieldMetaData>();
 
             // We simply wrap another postings writer, but only call
             // on it when tot positions is >= the cutoff:
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index 0c3c261..e80556f 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -293,8 +293,8 @@ namespace Lucene.Net.Facet
                 return new MatchAllDocsQuery();
             }
 
-            IList<Filter> filters = new List<Filter>();
-            IList<Query> queries = new List<Query>();
+            IList<Filter> filters = new JCG.List<Filter>();
+            IList<Query> queries = new JCG.List<Query>();
             IList<BooleanClause> clauses = query.Clauses;
             Query baseQuery;
             int startIndex;
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index 15929fa..55fc201 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
@@ -41,7 +42,7 @@ namespace Lucene.Net.Facet
         private int totalHits;
         private float[] scores;
         private readonly bool keepScores;
-        private readonly List<MatchingDocs> matchingDocs = new List<MatchingDocs>();
+        private readonly IList<MatchingDocs> matchingDocs = new JCG.List<MatchingDocs>();
         private Docs docs;
 
         /// <summary>
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 83f60c1..c2d626c 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -327,7 +327,7 @@ namespace Lucene.Net.Facet
                     string indexFieldName = dimConfig.IndexFieldName;
                     if (!byField.TryGetValue(indexFieldName, out IList<FacetField> fields))
                     {
-                        fields = new List<FacetField>();
+                        fields = new JCG.List<FacetField>();
                         byField[indexFieldName] = fields;
                     }
                     fields.Add(facetField);
@@ -344,7 +344,7 @@ namespace Lucene.Net.Facet
                     string indexFieldName = dimConfig.IndexFieldName;
                     if (!dvByField.TryGetValue(indexFieldName, out IList<SortedSetDocValuesFacetField> fields))
                     {
-                        fields = new List<SortedSetDocValuesFacetField>();
+                        fields = new JCG.List<SortedSetDocValuesFacetField>();
                         dvByField[indexFieldName] = fields;
                     }
                     fields.Add(facetField);
@@ -370,7 +370,7 @@ namespace Lucene.Net.Facet
                     string indexFieldName = dimConfig.IndexFieldName;
                     if (!assocByField.TryGetValue(indexFieldName, out IList<AssociationFacetField> fields))
                     {
-                        fields = new List<AssociationFacetField>();
+                        fields = new JCG.List<AssociationFacetField>();
                         assocByField[indexFieldName] = fields;
                     }
                     fields.Add(facetField);
@@ -695,7 +695,7 @@ namespace Lucene.Net.Facet
         /// </summary>
         public static string[] StringToPath(string s)
         {
-            List<string> parts = new List<string>();
+            JCG.List<string> parts = new JCG.List<string>();
             int length = s.Length;
             if (length == 0)
             {
diff --git a/src/Lucene.Net.Facet/MultiFacets.cs b/src/Lucene.Net.Facet/MultiFacets.cs
index fe86597..6b1874e 100644
--- a/src/Lucene.Net.Facet/MultiFacets.cs
+++ b/src/Lucene.Net.Facet/MultiFacets.cs
@@ -1,6 +1,7 @@
 // Lucene version compatibility level 4.8.1
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
@@ -69,7 +70,7 @@ namespace Lucene.Net.Facet
 
         public override IList<FacetResult> GetAllDims(int topN)
         {
-            List<FacetResult> results = new List<FacetResult>();
+            IList<FacetResult> results = new JCG.List<FacetResult>();
 
             // First add the specific dim's facets:
             foreach (KeyValuePair<string, Facets> ent in dimToFacets)
diff --git a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
index 8bd0fcc..f22e1ab 100644
--- a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
@@ -2,7 +2,7 @@
 using J2N.Numerics;
 using System;
 using System.Collections.Generic;
-using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
@@ -180,7 +180,7 @@ namespace Lucene.Net.Facet
         /// </summary>
         private IList<MatchingDocs> CreateSampledDocs(ICollection<MatchingDocs> matchingDocsList)
         {
-            List<MatchingDocs> sampledDocsList = new List<MatchingDocs>(matchingDocsList.Count);
+            IList<MatchingDocs> sampledDocsList = new JCG.List<MatchingDocs>(matchingDocsList.Count);
             foreach (MatchingDocs docs in matchingDocsList)
             {
                 sampledDocsList.Add(CreateSample(docs));
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index 2495c85..6740240 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -3,6 +3,7 @@ using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet.Range
 {
@@ -75,11 +76,11 @@ namespace Lucene.Net.Facet.Range
                 }
             }
 
-            var endsList = new List<long?>(endsMap.Keys);
+            var endsList = new JCG.List<long?>(endsMap.Keys);
             endsList.Sort();
 
             // Build elementaryIntervals (a 1D Venn diagram):
-            IList<InclusiveRange> elementaryIntervals = new List<InclusiveRange>();
+            IList<InclusiveRange> elementaryIntervals = new JCG.List<InclusiveRange>();
             int upto0 = 1;
             long v = endsList[0] ?? 0;
             long prev;
@@ -344,7 +345,7 @@ namespace Lucene.Net.Facet.Range
                     // range; add to our output list:
                     if (outputs == null)
                     {
-                        outputs = new List<int?>();
+                        outputs = new JCG.List<int?>();
                     }
                     outputs.Add(index);
                 }
diff --git a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
index af9a7cd..755ef7a 100644
--- a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
@@ -1,6 +1,7 @@
 // Lucene version compatibility level 4.8.1
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet.Range
 {
@@ -90,7 +91,7 @@ namespace Lucene.Net.Facet.Range
 
         public override IList<FacetResult> GetAllDims(int topN)
         {
-            return new List<FacetResult> { GetTopChildren(topN, null) };
+            return new JCG.List<FacetResult> { GetTopChildren(topN, null) };
         }
     }
 }
\ No newline at end of file
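
The facet hunks nearby also show that JCG.List<T> keeps the familiar construction surface: a capacity overload (new JCG.List<Block>(blocksCount)), a copy constructor over an IEnumerable<T> (new JCG.List<long?>(endsMap.Keys)), and collection-initializer syntax (new JCG.List<FacetResult> { ... }). A compilable sketch of those three forms with placeholder data:

    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    public static class ConstructionSketch  // placeholder, not from the patch
    {
        public static int Demo(IDictionary<string, int> counts)
        {
            var byCapacity  = new JCG.List<int>(counts.Count);       // capacity overload
            var copied      = new JCG.List<string>(counts.Keys);     // copy from IEnumerable<T>
            var initialized = new JCG.List<string> { "a", "b" };     // collection initializer

            byCapacity.Add(copied.Count + initialized.Count);
            return byCapacity[0];
        }
    }
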
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 77ca2ef..27b04af 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -3,6 +3,7 @@ using J2N.Text;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet.SortedSet
 {
@@ -289,7 +290,7 @@ namespace Lucene.Net.Facet.SortedSet
 
         public override IList<FacetResult> GetAllDims(int topN)
         {
-            List<FacetResult> results = new List<FacetResult>();
+            JCG.List<FacetResult> results = new JCG.List<FacetResult>();
             foreach (KeyValuePair<string, OrdRange> ent in state.PrefixToOrdRange)
             {
                 FacetResult fr = GetDim(ent.Key, ent.Value, topN);
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index 1205c65..6eb62ec 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -2,6 +2,7 @@
 using J2N.Text;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
@@ -103,7 +104,7 @@ namespace Lucene.Net.Facet.Taxonomy
         public override IList<FacetResult> GetAllDims(int topN)
         {
             int ord = m_children[TaxonomyReader.ROOT_ORDINAL];
-            List<FacetResult> results = new List<FacetResult>();
+            JCG.List<FacetResult> results = new JCG.List<FacetResult>();
             while (ord != TaxonomyReader.INVALID_ORDINAL)
             {
                 string dim = m_taxoReader.GetPath(ord).Components[0];
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index 9b26e73..8b7466f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -5,6 +5,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
@@ -107,7 +108,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         internal CharBlockArray(int blockSize)
         {
-            this.blocks = new List<Block>();
+            this.blocks = new JCG.List<Block>();
             this.blockSize = blockSize;
             AddBlock();
         }
@@ -294,7 +295,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             {
                 case serialVersionUID:
                     var blocksCount = reader.ReadInt32();
-                    this.blocks = new List<Block>(blocksCount);
+                    this.blocks = new JCG.List<Block>(blocksCount);
                     for (int i = 0; i < blocksCount; i++)
                     {
                         blocks.Add(new Block(reader));
diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
index bd8e559..069fecd 100644
--- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
@@ -126,7 +126,7 @@ namespace Lucene.Net.Search.Grouping
                 BuildSortedSet();
             }
 
-            ICollection<ISearchGroup<TGroupValue>> result = new List<ISearchGroup<TGroupValue>>();
+            ICollection<ISearchGroup<TGroupValue>> result = new JCG.List<ISearchGroup<TGroupValue>>();
             int upto = 0;
             int sortFieldCount = groupSort.GetSort().Length;
             foreach (CollectedSearchGroup<TGroupValue> group in m_orderedGroups)
diff --git a/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs b/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
index 51006b5..0ceacaa 100644
--- a/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Search.Grouping
             this.m_groupField = groupField;
             this.m_facetField = facetField;
             this.m_facetPrefix = facetPrefix;
-            m_segmentResults = new List<AbstractSegmentResult>();
+            m_segmentResults = new JCG.List<AbstractSegmentResult>();
         }
 
         /// <summary>
@@ -206,7 +206,7 @@ namespace Lucene.Net.Search.Grouping
             /// <returns>a list of facet entries to be rendered based on the specified offset and limit</returns>
             public virtual IList<FacetEntry> GetFacetEntries(int offset, int limit)
             {
-                List<FacetEntry> entries = new List<FacetEntry>();
+                IList<FacetEntry> entries = new JCG.List<FacetEntry>();
 
                 int skipped = 0;
                 int included = 0;
diff --git a/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs
index 1f15e83..d402555 100644
--- a/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/Function/FunctionDistinctValuesCollector.cs
@@ -53,7 +53,7 @@ namespace Lucene.Net.Search.Grouping.Function
             }
         }
 
-        public override IEnumerable<GroupCount> Groups => new List<GroupCount>(groupMap.Values);
+        public override IEnumerable<GroupCount> Groups => new JCG.List<GroupCount>(groupMap.Values);
 
         public override void Collect(int doc)
         {
diff --git a/src/Lucene.Net.Grouping/GroupingSearch.cs b/src/Lucene.Net.Grouping/GroupingSearch.cs
index 846d055..e495a68 100644
--- a/src/Lucene.Net.Grouping/GroupingSearch.cs
+++ b/src/Lucene.Net.Grouping/GroupingSearch.cs
@@ -8,6 +8,7 @@ using System;
 using System.Collections;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
@@ -324,7 +325,7 @@ namespace Lucene.Net.Search.Grouping
             ICollector firstRound;
             if (allGroupHeads || allGroups)
             {
-                List<ICollector> collectors = new List<ICollector>();
+                JCG.List<ICollector> collectors = new JCG.List<ICollector>();
                 collectors.Add(firstPassCollector);
 
                 if (allGroups)
@@ -448,7 +449,7 @@ namespace Lucene.Net.Search.Grouping
             ICollector firstRound;
             if (allGroupHeads || allGroups)
             {
-                List<ICollector> collectors = new List<ICollector>();
+                JCG.List<ICollector> collectors = new JCG.List<ICollector>();
                 collectors.Add(firstPassCollector);
 
                 if (allGroups)
diff --git a/src/Lucene.Net.Grouping/SearchGroup.cs b/src/Lucene.Net.Grouping/SearchGroup.cs
index 580ed50..6c72371 100644
--- a/src/Lucene.Net.Grouping/SearchGroup.cs
+++ b/src/Lucene.Net.Grouping/SearchGroup.cs
@@ -142,7 +142,7 @@ namespace Lucene.Net.Search.Grouping
             private object[] topValues;
 
             public IList<ShardIter<T>> Shards => shards;
-            private readonly List<ShardIter<T>> shards = new List<ShardIter<T>>();
+            private readonly IList<ShardIter<T>> shards = new JCG.List<ShardIter<T>>();
 
             public int MinShardIndex
             {
@@ -410,7 +410,7 @@ namespace Lucene.Net.Search.Grouping
                 }
 
                 // Pull merged topN groups:
-                List<SearchGroup<T>> newTopGroups = new List<SearchGroup<T>>();
+                IList<SearchGroup<T>> newTopGroups = new JCG.List<SearchGroup<T>>();
 
                 int count = 0;
 
diff --git a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
index 13137af..50e6c1f 100644
--- a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
@@ -269,7 +269,7 @@ namespace Lucene.Net.Search.Grouping.Terms
             : base(groupField, sortWithinGroup.GetSort().Length)
         {
             ordSet = new SentinelInt32Set(initialSize, -2);
-            collectedGroups = new List<GroupHead>(initialSize);
+            collectedGroups = new JCG.List<GroupHead>(initialSize);
 
             SortField[] sortFields = sortWithinGroup.GetSort();
             fields = new SortField[sortFields.Length];
@@ -493,7 +493,7 @@ namespace Lucene.Net.Search.Grouping.Terms
                     : base(groupField, sortWithinGroup.GetSort().Length)
         {
             ordSet = new SentinelInt32Set(initialSize, -2);
-            collectedGroups = new List<GroupHead>(initialSize);
+            collectedGroups = new JCG.List<GroupHead>(initialSize);
 
             SortField[] sortFields = sortWithinGroup.GetSort();
             fields = new SortField[sortFields.Length];
@@ -675,7 +675,7 @@ namespace Lucene.Net.Search.Grouping.Terms
                     : base(groupField, sortWithinGroup.GetSort().Length)
         {
             ordSet = new SentinelInt32Set(initialSize, -2);
-            collectedGroups = new List<GroupHead>(initialSize);
+            collectedGroups = new JCG.List<GroupHead>(initialSize);
 
             SortField[] sortFields = sortWithinGroup.GetSort();
             fields = new SortField[sortFields.Length];
diff --git a/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs b/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs
index 3cccd6f..aa49238 100644
--- a/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping.Terms
 {
@@ -61,7 +62,7 @@ namespace Lucene.Net.Search.Grouping.Terms
         public TermAllGroupsCollector(string groupField, int initialSize)
         {
             ordSet = new SentinelInt32Set(initialSize, -2);
-            groups = new List<BytesRef>(initialSize);
+            groups = new JCG.List<BytesRef>(initialSize);
             this.groupField = groupField;
         }
 
diff --git a/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
index 378ba38..31a8cac 100644
--- a/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
@@ -4,6 +4,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping.Terms
 {
@@ -34,7 +35,7 @@ namespace Lucene.Net.Search.Grouping.Terms
     {
         private readonly string groupField;
         private readonly string countField;
-        private readonly List<GroupCount> groups;
+        private readonly IList<GroupCount> groups;
         private readonly SentinelInt32Set ordSet;
         private readonly GroupCount[] groupCounts;
 
@@ -52,7 +53,7 @@ namespace Lucene.Net.Search.Grouping.Terms
             this.groupField = groupField;
             this.countField = countField;
             int groupCount = groups.Count();
-            this.groups = new List<GroupCount>(groupCount);
+            this.groups = new JCG.List<GroupCount>(groupCount);
             foreach (ISearchGroup<BytesRef> group in groups)
             {
                 this.groups.Add(new GroupCount(group.GroupValue));
diff --git a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs
index 1bb1b27..bdd6eac 100644
--- a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs
@@ -2,7 +2,7 @@
 using Lucene.Net.Index;
 using Lucene.Net.Util;
 using System.Collections.Generic;
-using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping.Terms
 {
@@ -31,7 +31,7 @@ namespace Lucene.Net.Search.Grouping.Terms
     /// </summary>
     public abstract class TermGroupFacetCollector : AbstractGroupFacetCollector
     {
-        internal readonly List<GroupedFacetHit> groupedFacetHits;
+        internal readonly IList<GroupedFacetHit> groupedFacetHits;
         internal readonly SentinelInt32Set segmentGroupedFacetHits;
 
         internal SortedDocValues groupFieldTermsIndex;
@@ -69,7 +69,7 @@ namespace Lucene.Net.Search.Grouping.Terms
         internal TermGroupFacetCollector(string groupField, string facetField, BytesRef facetPrefix, int initialSize)
             : base(groupField, facetField, facetPrefix)
         {
-            groupedFacetHits = new List<GroupedFacetHit>(initialSize);
+            groupedFacetHits = new JCG.List<GroupedFacetHit>(initialSize);
             segmentGroupedFacetHits = new SentinelInt32Set(initialSize, int.MinValue);
         }
 
diff --git a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
index 1d1a0a0..17574fe 100644
--- a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
@@ -5,6 +5,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
@@ -137,7 +138,7 @@ namespace Lucene.Net.Search.Highlight
             TextFragment[] frag = GetBestTextFragments(tokenStream, text, true, maxNumFragments);
 
             //Get text
-            var fragTexts = new List<string>();
+            var fragTexts = new JCG.List<string>();
             for (int i = 0; i < frag.Length; i++)
             {
                 if ((frag[i] != null) && (frag[i].Score > 0))
@@ -161,7 +162,7 @@ namespace Lucene.Net.Search.Highlight
             bool mergeContiguousFragments,
             int maxNumFragments)
         {
-            var docFrags = new List<TextFragment>();
+            var docFrags = new JCG.List<TextFragment>();
             var newText = new StringBuilder();
 
             var termAtt = tokenStream.AddAttribute<ICharTermAttribute>();
@@ -305,7 +306,7 @@ namespace Lucene.Net.Search.Highlight
                 if (mergeContiguousFragments)
                 {
                     MergeContiguousFragments(frag);
-                    List<TextFragment> fragTexts = new List<TextFragment>();
+                    JCG.List<TextFragment> fragTexts = new JCG.List<TextFragment>();
                     for (int i = 0; i < frag.Length; i++)
                     {
                         if ((frag[i] != null) && (frag[i].Score > 0))
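
The Highlighter.cs hunks above show the pattern repeated throughout this sweep: a "using JCG = J2N.Collections.Generic;" alias is added and JCG.List<T> is constructed wherever the BCL List<T> was constructed before, while many declarations elsewhere are widened to IList<T>. The sketch below is illustrative only, not part of the patch; the class and member names are invented.

    using System.Collections.Generic;       // still needed for interface types such as IList<T>
    using JCG = J2N.Collections.Generic;    // alias keeps List<T> references unambiguous

    namespace Example
    {
        public class FragmentCollector
        {
            // The field stays interface-typed; only the concrete list type switches to J2N's.
            private readonly IList<string> fragments = new JCG.List<string>();

            public void Add(string text)
            {
                if (!string.IsNullOrEmpty(text))
                {
                    fragments.Add(text);
                }
            }

            public string[] Snapshot()
            {
                // Local lists can simply use var with the JCG constructor, as in the hunks above.
                var copy = new JCG.List<string>(fragments);
                return copy.ToArray();
            }
        }
    }
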
diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs b/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
index db98f19..eba68aa 100644
--- a/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
@@ -213,7 +214,7 @@ namespace Lucene.Net.Search.Highlight
                 totalTokens += (int)termsEnum.TotalTermFreq;
             }
             Token[] tokensInOriginalOrder = new Token[totalTokens];
-            List<Token> unsortedTokens = null;
+            JCG.List<Token> unsortedTokens = null;
             termsEnum = tpv.GetEnumerator();
             DocsAndPositionsEnum dpEnum = null;
             while (termsEnum.MoveNext())
@@ -263,7 +264,7 @@ namespace Lucene.Net.Search.Highlight
                         // add to list and sort later
                         if (unsortedTokens == null)
                         {
-                            unsortedTokens = new List<Token>();
+                            unsortedTokens = new JCG.List<Token>();
                         }
                         unsortedTokens.Add(token);
                     }
diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs b/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
index 5baf119..cf7b187 100644
--- a/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
@@ -28,7 +29,7 @@ namespace Lucene.Net.Search.Highlight
     /// </summary>
     public sealed class TokenStreamFromTermPositionVector : TokenStream
     {
-        private readonly List<Token> positionedTokens = new List<Token>();
+        private readonly IList<Token> positionedTokens = new JCG.List<Token>();
 
         private IEnumerator<Token> tokensAtCurrentPosition;
 
diff --git a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTerm.cs b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTerm.cs
index 31de3ea..bf1dfb0 100644
--- a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTerm.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTerm.cs
@@ -1,4 +1,5 @@
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Highlight
 {
@@ -25,7 +26,7 @@ namespace Lucene.Net.Search.Highlight
     public class WeightedSpanTerm : WeightedTerm
     {
         private bool _positionSensitive;
-        private readonly List<PositionSpan> _positionSpans = new List<PositionSpan>();
+        private readonly JCG.List<PositionSpan> _positionSpans = new JCG.List<PositionSpan>();
 
         public WeightedSpanTerm(float weight, string term)
             : base(weight, term)
diff --git a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
index a4b554c..44d4671 100644
--- a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
@@ -164,16 +164,16 @@ namespace Lucene.Net.Search.Highlight
                         }
                     }
 
-                    var disjunctLists = new List<SpanQuery>[maxPosition + 1];
+                    var disjunctLists = new JCG.List<SpanQuery>[maxPosition + 1];
                     int distinctPositions = 0;
 
                     for (int i = 0; i < termArrays.Count; ++i)
                     {
                         Term[] termArray = termArrays[i];
-                        List<SpanQuery> disjuncts = disjunctLists[positions[i]];
+                        JCG.List<SpanQuery> disjuncts = disjunctLists[positions[i]];
                         if (disjuncts == null)
                         {
-                            disjuncts = (disjunctLists[positions[i]] = new List<SpanQuery>(termArray.Length));
+                            disjuncts = (disjunctLists[positions[i]] = new JCG.List<SpanQuery>(termArray.Length));
                             ++distinctPositions;
                         }
                         foreach (var term in termArray)
@@ -282,7 +282,7 @@ namespace Lucene.Net.Search.Highlight
                 spanQuery.ExtractTerms(nonWeightedTerms);
             }
 
-            List<PositionSpan> spanPositions = new List<PositionSpan>();
+            IList<PositionSpan> spanPositions = new JCG.List<PositionSpan>();
 
             foreach (string field in fieldNames)
             {
@@ -414,7 +414,7 @@ namespace Lucene.Net.Search.Highlight
 
                 public override IEnumerator<string> GetEnumerator()
                 {
-                    var list = new List<string> { DelegatingAtomicReader.FIELD_NAME };
+                    var list = new JCG.List<string> { DelegatingAtomicReader.FIELD_NAME };
                     return list.GetEnumerator();
                 }
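
The disjunctLists hunk earlier in this file keeps the original lazy-initialization scheme, an array of per-position lists where each slot is allocated on first use; only the list type changes. A rough standalone sketch of that pattern, with invented names and element types:

    using JCG = J2N.Collections.Generic;

    public static class PositionBuckets
    {
        // One bucket per position, allocated only when that position is first seen.
        public static JCG.List<string>[] Build(int maxPosition, (int Position, string Term)[] items)
        {
            var buckets = new JCG.List<string>[maxPosition + 1];
            foreach ((int position, string term) in items)
            {
                JCG.List<string> bucket = buckets[position];
                if (bucket == null)
                {
                    // Assign into the array and keep a local reference in one step,
                    // mirroring the disjunctLists assignment above.
                    bucket = (buckets[position] = new JCG.List<string>(4));
                }
                bucket.Add(term);
            }
            return buckets;
        }
    }
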
 
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
index 64174ec..fdc8dbb 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
@@ -10,6 +10,7 @@ using Lucene.Net.Util.Automaton;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.PostingsHighlight
 {
@@ -41,7 +42,7 @@ namespace Lucene.Net.Search.PostingsHighlight
         /// </summary>
         internal static CharacterRunAutomaton[] ExtractAutomata(Query query, string field)
         {
-            List<CharacterRunAutomaton> list = new List<CharacterRunAutomaton>();
+            JCG.List<CharacterRunAutomaton> list = new JCG.List<CharacterRunAutomaton>();
             if (query is BooleanQuery booleanQuery)
             {
                 foreach (BooleanClause clause in booleanQuery.GetClauses())
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
index f5654da..c2b4e51 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
@@ -781,7 +781,7 @@ namespace Lucene.Net.Search.PostingsHighlight
         protected virtual Passage[] GetEmptyHighlight(string fieldName, BreakIterator bi, int maxPassages)
         {
             // BreakIterator should be un-next'd:
-            List<Passage> passages = new List<Passage>();
+            JCG.List<Passage> passages = new JCG.List<Passage>();
             int pos = bi.Current;
             if (Debugging.AssertsEnabled) Debugging.Assert(pos == 0);
             while (passages.Count < maxPassages)
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs
index 0ca50f9..5f4134e 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo;
 
 namespace Lucene.Net.Search.VectorHighlight
@@ -62,7 +62,7 @@ namespace Lucene.Net.Search.VectorHighlight
             if (fragCharSize < minFragCharSize)
                 throw new ArgumentOutOfRangeException(nameof(fragCharSize), "fragCharSize(" + fragCharSize + ") is too small. It must be " + minFragCharSize + " or higher."); // LUCENENET specific - changed from IllegalArgumentException to ArgumentOutOfRangeException (.NET convention)
 
-            List<WeightedPhraseInfo> wpil = new List<WeightedPhraseInfo>();
+            JCG.List<WeightedPhraseInfo> wpil = new JCG.List<WeightedPhraseInfo>();
             using (IteratorQueue<WeightedPhraseInfo> queue = new IteratorQueue<WeightedPhraseInfo>(fieldPhraseList.PhraseList.GetEnumerator()))
             {
                 WeightedPhraseInfo phraseInfo = null;
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
index bd2735d..824cb1c 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
@@ -149,7 +149,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
             fragInfos = GetWeightedFragInfoList(fragInfos);
             int limitFragments = maxNumFragments < fragInfos.Count ? maxNumFragments : fragInfos.Count;
-            List<string> fragments = new List<string>(limitFragments);
+            JCG.List<string> fragments = new JCG.List<string>(limitFragments);
 
             StringBuilder buffer = new StringBuilder();
             int[] nextValueIndex = { 0 };
@@ -164,7 +164,7 @@ namespace Lucene.Net.Search.VectorHighlight
         protected virtual Field[] GetFields(IndexReader reader, int docId, string fieldName)
         {
             // according to javadoc, doc.getFields(fieldName) cannot be used with lazy loaded field???
-            List<Field> fields = new List<Field>();
+            JCG.List<Field> fields = new JCG.List<Field>();
             reader.Document(docId, new GetFieldsStoredFieldsVisitorAnonymousClass(fields, fieldName));
 
             return fields.ToArray(/*new Field[fields.size()]*/);
@@ -251,10 +251,10 @@ namespace Lucene.Net.Search.VectorHighlight
 
         protected virtual IList<WeightedFragInfo> DiscreteMultiValueHighlighting(IList<WeightedFragInfo> fragInfos, Field[] fields)
         {
-            IDictionary<string, List<WeightedFragInfo>> fieldNameToFragInfos = new Dictionary<string, List<WeightedFragInfo>>();
+            IDictionary<string, IList<WeightedFragInfo>> fieldNameToFragInfos = new Dictionary<string, IList<WeightedFragInfo>>();
             foreach (Field field in fields)
             {
-                fieldNameToFragInfos[field.Name] = new List<WeightedFragInfo>();
+                fieldNameToFragInfos[field.Name] = new JCG.List<WeightedFragInfo>();
             }
 
             foreach (WeightedFragInfo fragInfo in fragInfos)
@@ -342,8 +342,8 @@ namespace Lucene.Net.Search.VectorHighlight
             fragInfos_continue: { }
             }
 
-            List<WeightedFragInfo> result = new List<WeightedFragInfo>();
-            foreach (List<WeightedFragInfo> weightedFragInfos in fieldNameToFragInfos.Values)
+            JCG.List<WeightedFragInfo> result = new JCG.List<WeightedFragInfo>();
+            foreach (IList<WeightedFragInfo> weightedFragInfos in fieldNameToFragInfos.Values)
             {
                 result.AddRange(weightedFragInfos);
             }
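
The DiscreteMultiValueHighlighting hunks above pair interface-typed dictionary values (IList<WeightedFragInfo>) with JCG.List instances and then flatten the per-field groups with AddRange. The same shape in isolation, with hypothetical names, assuming only the familiar AddRange(IEnumerable<T>) overload:

    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    public static class Grouping
    {
        // Group values by key into interface-typed lists, then flatten them in one pass.
        public static JCG.List<string> GroupAndFlatten(IEnumerable<(string Field, string Value)> items)
        {
            IDictionary<string, IList<string>> byField = new Dictionary<string, IList<string>>();
            foreach ((string field, string value) in items)
            {
                if (!byField.TryGetValue(field, out IList<string> list))
                {
                    list = new JCG.List<string>();
                    byField[field] = list;
                }
                list.Add(value);
            }

            var result = new JCG.List<string>();
            foreach (IList<string> values in byField.Values)
            {
                result.AddRange(values);
            }
            return result;
        }
    }
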
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs
index 3be0baa..9cca190 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.Text;
 using Float = J2N.Numerics.Single;
+using JCG = J2N.Collections.Generic;
 using Toffs = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo.Toffs;
 using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo;
 
@@ -30,7 +31,7 @@ namespace Lucene.Net.Search.VectorHighlight
     /// </summary>
     public abstract class FieldFragList
     {
-        private readonly List<WeightedFragInfo> fragInfos = new List<WeightedFragInfo>();
+        private readonly IList<WeightedFragInfo> fragInfos = new JCG.List<WeightedFragInfo>();
 
         /// <summary>
         /// a constructor.
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
index bfb464f..6afb3a6 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.Text;
 using Float = J2N.Numerics.Single;
+using JCG = J2N.Collections.Generic;
 using QueryPhraseMap = Lucene.Net.Search.VectorHighlight.FieldQuery.QueryPhraseMap;
 using TermInfo = Lucene.Net.Search.VectorHighlight.FieldTermStack.TermInfo;
 
@@ -35,7 +36,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// <summary>
         /// List of non-overlapping <see cref="WeightedPhraseInfo"/> objects.
         /// </summary>
-        internal List<WeightedPhraseInfo> phraseList = new List<WeightedPhraseInfo>();
+        internal IList<WeightedPhraseInfo> phraseList = new JCG.List<WeightedPhraseInfo>();
 
         /// <summary>
         /// create a <see cref="FieldPhraseList"/> that has no limit on the number of phrases to analyze
@@ -62,7 +63,7 @@ namespace Lucene.Net.Search.VectorHighlight
         {
             string field = fieldTermStack.FieldName;
 
-            List<TermInfo> phraseCandidate = new List<TermInfo>();
+            IList<TermInfo> phraseCandidate = new JCG.List<TermInfo>();
             QueryPhraseMap currMap; // LUCENENET: IDE0059: Remove unnecessary value assignment
             QueryPhraseMap nextMap; // LUCENENET: IDE0059: Remove unnecessary value assignment
             while (!fieldTermStack.IsEmpty && (phraseList.Count < phraseLimit))
@@ -156,12 +157,12 @@ namespace Lucene.Net.Search.VectorHighlight
                 }
                 using MergedEnumerator<WeightedPhraseInfo> itr = new MergedEnumerator<WeightedPhraseInfo>(false, allInfos);
                 // Step 2.  Walk the sorted list merging infos that overlap
-                phraseList = new List<WeightedPhraseInfo>();
+                phraseList = new JCG.List<WeightedPhraseInfo>();
                 if (!itr.MoveNext())
                 {
                     return;
                 }
-                List<WeightedPhraseInfo> work = new List<WeightedPhraseInfo>();
+                IList<WeightedPhraseInfo> work = new JCG.List<WeightedPhraseInfo>();
                 WeightedPhraseInfo first = itr.Current;
                 work.Add(first);
                 int workEndOffset = first.EndOffset;
@@ -225,12 +226,12 @@ namespace Lucene.Net.Search.VectorHighlight
         /// </summary>
         public class WeightedPhraseInfo : IComparable<WeightedPhraseInfo>, IFormattable // LUCENENET specific - implemented IFormattable for floating point representations
         {
-            private readonly List<Toffs> termsOffsets;   // usually termsOffsets.size() == 1, // LUCENENET: marked readonly
+            private readonly IList<Toffs> termsOffsets;   // usually termsOffsets.size() == 1, // LUCENENET: marked readonly
                                                          // but if position-gap > 1 and slop > 0 then size() could be greater than 1
             private readonly float boost;  // query boost // LUCENENET: marked readonly
             private readonly int seqnum; // LUCENENET: marked readonly
 
-            private readonly List<TermInfo> termsInfos; // LUCENENET: marked readonly
+            private readonly JCG.List<TermInfo> termsInfos; // LUCENENET: marked readonly
 
             /// <summary>
             /// Text of the match, calculated on the fly.  Use for debugging only.
@@ -272,9 +273,9 @@ namespace Lucene.Net.Search.VectorHighlight
                 this.seqnum = seqnum;
 
                 // We keep TermInfos for further operations
-                termsInfos = new List<TermInfo>(terms);
+                termsInfos = new JCG.List<TermInfo>(terms);
 
-                termsOffsets = new List<Toffs>(terms.Count);
+                termsOffsets = new JCG.List<Toffs>(terms.Count);
                 TermInfo ti = terms[0];
                 termsOffsets.Add(new Toffs(ti.StartOffset, ti.EndOffset));
                 if (terms.Count == 1)
@@ -317,7 +318,7 @@ namespace Lucene.Net.Search.VectorHighlight
                     }
                     WeightedPhraseInfo first = toMergeItr.Current;
 
-                    termsInfos = new List<TermInfo>();
+                    termsInfos = new JCG.List<TermInfo>();
                     seqnum = first.seqnum;
                     boost = first.boost;
                     allToffs[0] = first.termsOffsets.GetEnumerator();
@@ -332,7 +333,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
                     // Step 2.  Walk the sorted list merging overlaps
                     using MergedEnumerator<Toffs> itr = new MergedEnumerator<Toffs>(false, allToffs);
-                    termsOffsets = new List<Toffs>();
+                    termsOffsets = new JCG.List<Toffs>();
                     if (!itr.MoveNext())
                     {
                         return;
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
index c470e0a..0c70b44 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
@@ -3,9 +3,9 @@ using Lucene.Net.Index;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.VectorHighlight
 {
@@ -33,7 +33,7 @@ namespace Lucene.Net.Search.VectorHighlight
     public class FieldTermStack
     {
         private readonly string fieldName;
-        internal List<TermInfo> termList = new List<TermInfo>();
+        internal IList<TermInfo> termList = new JCG.List<TermInfo>();
 
         //public static void main( string[] args ) throws Exception {
         //  Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/SimpleFieldFragList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/SimpleFieldFragList.cs
index 086361a..7f8b341 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/SimpleFieldFragList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/SimpleFieldFragList.cs
@@ -1,4 +1,5 @@
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using SubInfo = Lucene.Net.Search.VectorHighlight.FieldFragList.WeightedFragInfo.SubInfo;
 using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo;
 
@@ -41,7 +42,7 @@ namespace Lucene.Net.Search.VectorHighlight
         public override void Add(int startOffset, int endOffset, IList<WeightedPhraseInfo> phraseInfoList)
         {
             float totalBoost = 0;
-            List<SubInfo> subInfos = new List<SubInfo>();
+            IList<SubInfo> subInfos = new JCG.List<SubInfo>();
             foreach (WeightedPhraseInfo phraseInfo in phraseInfoList)
             {
                 subInfos.Add(new SubInfo(phraseInfo.GetText(), phraseInfo.TermsOffsets, phraseInfo.Seqnum, phraseInfo.Boost));
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs
index ce20f30..d4d344a 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs
@@ -1,4 +1,5 @@
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo;
 
 namespace Lucene.Net.Search.VectorHighlight
@@ -37,7 +38,7 @@ namespace Lucene.Net.Search.VectorHighlight
         {
             FieldFragList ffl = new SimpleFieldFragList(fragCharSize);
 
-            List<WeightedPhraseInfo> wpil = new List<WeightedPhraseInfo>();
+            IList<WeightedPhraseInfo> wpil = new JCG.List<WeightedPhraseInfo>();
             using IEnumerator<WeightedPhraseInfo> ite = fieldPhraseList.PhraseList.GetEnumerator();
             WeightedPhraseInfo phraseInfo = null;
             while (true)
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs
index 3c262f8..e65d43b 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/WeightedFieldFragList.cs
@@ -43,8 +43,8 @@ namespace Lucene.Net.Search.VectorHighlight
         /// </summary>
         public override void Add(int startOffset, int endOffset, IList<WeightedPhraseInfo> phraseInfoList)
         {
-            IList<SubInfo> tempSubInfos = new List<SubInfo>();
-            IList<SubInfo> realSubInfos = new List<SubInfo>();
+            IList<SubInfo> tempSubInfos = new JCG.List<SubInfo>();
+            IList<SubInfo> realSubInfos = new JCG.List<SubInfo>();
             ISet<string> distinctTerms = new JCG.HashSet<string>();
             int length = 0;
 
diff --git a/src/Lucene.Net.Join/Support/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/Support/ToChildBlockJoinQuery.cs
index 835c26a..b1dc709 100644
--- a/src/Lucene.Net.Join/Support/ToChildBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/Support/ToChildBlockJoinQuery.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Join
 {
@@ -175,7 +176,7 @@ namespace Lucene.Net.Join
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return new List<ChildScorer> { new ChildScorer(_parentScorer, "BLOCK_JOIN") };
+                return new JCG.List<ChildScorer> { new ChildScorer(_parentScorer, "BLOCK_JOIN") };
             }
 
             public override int NextDoc()
diff --git a/src/Lucene.Net.Join/Support/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/Support/ToParentBlockJoinQuery.cs
index 3d8c4f4..f5547f5 100644
--- a/src/Lucene.Net.Join/Support/ToParentBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/Support/ToParentBlockJoinQuery.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Join
 {
@@ -221,7 +222,7 @@ namespace Lucene.Net.Join
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return new List<ChildScorer> { new ChildScorer(_childScorer, "BLOCK_JOIN") };
+                return new JCG.List<ChildScorer> { new ChildScorer(_childScorer, "BLOCK_JOIN") };
             }
 
             internal virtual int ChildCount => _childDocUpto;
diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
index 65e7680..69dcd0b 100644
--- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
@@ -1,10 +1,10 @@
 // Lucene version compatibility level 4.8.1
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
-using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Join
 {
@@ -174,7 +174,7 @@ namespace Lucene.Net.Search.Join
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return new List<ChildScorer> { new ChildScorer(_parentScorer, "BLOCK_JOIN") };
+                return new JCG.List<ChildScorer> { new ChildScorer(_parentScorer, "BLOCK_JOIN") };
             }
             
             public override int NextDoc()
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
index ee9942b..e5a342b 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
@@ -4,6 +4,7 @@ using Lucene.Net.Index;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Join
 {
@@ -219,7 +220,7 @@ namespace Lucene.Net.Search.Join
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return new List<ChildScorer> { new ChildScorer(_childScorer, "BLOCK_JOIN") };
+                return new JCG.List<ChildScorer> { new ChildScorer(_childScorer, "BLOCK_JOIN") };
             }
 
             internal virtual int ChildCount => _childDocUpto;
diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs
index 82d2db9..94cbe5d 100644
--- a/src/Lucene.Net.Misc/Document/LazyDocument.cs
+++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs
@@ -70,7 +70,7 @@ namespace Lucene.Net.Documents
             fieldNames.Add(fieldInfo.Name);
             if (!fields.TryGetValue(fieldInfo.Number, out IList<LazyField> values) || null == values)
             {
-                values = new List<LazyField>();
+                values = new JCG.List<LazyField>();
                 fields[fieldInfo.Number] = values;
             }
 
diff --git a/src/Lucene.Net.Misc/Index/IndexSplitter.cs b/src/Lucene.Net.Misc/Index/IndexSplitter.cs
index 304539e..42f1bb3 100644
--- a/src/Lucene.Net.Misc/Index/IndexSplitter.cs
+++ b/src/Lucene.Net.Misc/Index/IndexSplitter.cs
@@ -4,6 +4,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -72,7 +73,7 @@ namespace Lucene.Net.Index
             }
             else if (args[1].Equals("-d", StringComparison.Ordinal))
             {
-                IList<string> segs = new List<string>();
+                IList<string> segs = new JCG.List<string>();
                 for (int x = 2; x < args.Length; x++)
                 {
                     segs.Add(args[x]);
@@ -82,7 +83,7 @@ namespace Lucene.Net.Index
             else
             {
                 DirectoryInfo targetDir = new DirectoryInfo(args[1]);
-                IList<string> segs = new List<string>();
+                IList<string> segs = new JCG.List<string>();
                 for (int x = 2; x < args.Length; x++)
                 {
                     segs.Add(args[x]);
diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
index 9277420..da156a0 100644
--- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
+++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
@@ -4,10 +4,10 @@ using Lucene.Net.Store;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -133,7 +133,7 @@ namespace Lucene.Net.Index
                 //Console.Error.WriteLine("\t-seq\tsequential docid-range split (default is round-robin)");
                 //Environment.Exit(-1);
             }
-            List<IndexReader> indexes = new List<IndexReader>();
+            IList<IndexReader> indexes = new JCG.List<IndexReader>();
             try
             {
                 string outDir = null;
diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
index 401cdca..3e9477c 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using Lucene.Net.Util.Packed;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index.Sorter
 {
@@ -86,7 +87,7 @@ namespace Lucene.Net.Index.Sorter
                     sortedView = SortingAtomicReader.Wrap(atomicView, docMap);
                 }
                 // a null doc map means that the readers are already sorted
-                return docMap == null ? unsortedReaders : new List<AtomicReader>(new AtomicReader[] { sortedView });
+                return docMap == null ? unsortedReaders : new JCG.List<AtomicReader>(new AtomicReader[] { sortedView });
             }
 
             public override SegmentCommitInfo Info
diff --git a/src/Lucene.Net.Queries/CommonTermsQuery.cs b/src/Lucene.Net.Queries/CommonTermsQuery.cs
index 54156d7..cd9dab8 100644
--- a/src/Lucene.Net.Queries/CommonTermsQuery.cs
+++ b/src/Lucene.Net.Queries/CommonTermsQuery.cs
@@ -73,7 +73,7 @@ namespace Lucene.Net.Queries
          * rewrite to dismax rather than boolean. Yet, this can already be subclassed
          * to do so.
          */
-        protected readonly IList<Term> m_terms = new List<Term>();
+        protected readonly IList<Term> m_terms = new JCG.List<Term>();
         protected readonly bool m_disableCoord;
         protected readonly float m_maxTermFrequency;
         protected readonly Occur m_lowFreqOccur;
diff --git a/src/Lucene.Net.Queries/CustomScoreQuery.cs b/src/Lucene.Net.Queries/CustomScoreQuery.cs
index 3c865ca..81ad2b0 100644
--- a/src/Lucene.Net.Queries/CustomScoreQuery.cs
+++ b/src/Lucene.Net.Queries/CustomScoreQuery.cs
@@ -8,6 +8,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries
 {
@@ -370,7 +371,7 @@ namespace Lucene.Net.Queries
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return new List<ChildScorer> { new ChildScorer(subQueryScorer, "CUSTOM") };
+                return new JCG.List<ChildScorer> { new ChildScorer(subQueryScorer, "CUSTOM") };
             }
 
             public override int Advance(int target)
diff --git a/src/Lucene.Net.Queries/Function/BoostedQuery.cs b/src/Lucene.Net.Queries/Function/BoostedQuery.cs
index 23464b7..a4f9f12 100644
--- a/src/Lucene.Net.Queries/Function/BoostedQuery.cs
+++ b/src/Lucene.Net.Queries/Function/BoostedQuery.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using System.Collections;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Function
 {
@@ -173,7 +174,7 @@ namespace Lucene.Net.Queries.Function
 
             public override ICollection<ChildScorer> GetChildren()
             {
-                return new List<ChildScorer> { new ChildScorer(scorer, "CUSTOM") };
+                return new JCG.List<ChildScorer> { new ChildScorer(scorer, "CUSTOM") };
             }
 
             public Explanation Explain(int doc)
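
The GetChildren hunks above (in the block join queries, CustomScoreQuery, and BoostedQuery) build their single-element result with collection-initializer syntax, which works unchanged with JCG.List<T> because, like the BCL list, it implements IEnumerable and exposes a public Add method. A trivial illustration with a made-up element type, not taken from the patch:

    using System;
    using JCG = J2N.Collections.Generic;

    public sealed class ChildNode
    {
        public string Relationship { get; }
        public ChildNode(string relationship) => Relationship = relationship;
    }

    public static class Demo
    {
        public static void Main()
        {
            // The initializer compiles to a constructor call followed by Add, same as before the sweep.
            var children = new JCG.List<ChildNode> { new ChildNode("BLOCK_JOIN") };
            Console.WriteLine(children.Count);  // prints 1
        }
    }
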
diff --git a/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs b/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
index b1b5de1..66cdeda 100644
--- a/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
+++ b/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
@@ -13,6 +13,7 @@ using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries.Mlt
 {
@@ -672,7 +673,7 @@ namespace Lucene.Net.Queries.Mlt
         /// <seealso cref="RetrieveInterestingTerms(TextReader, string)"/>
         public string[] RetrieveInterestingTerms(int docNum)
         {
-            var al = new List<string>(MaxQueryTerms);
+            var al = new JCG.List<string>(MaxQueryTerms);
             var pq = RetrieveTerms(docNum);
             ScoreTerm scoreTerm;
             int lim = MaxQueryTerms; // have to be careful, retrieveTerms returns all words but that's probably not useful to our caller...
@@ -696,7 +697,7 @@ namespace Lucene.Net.Queries.Mlt
         // LUCENENET: Factored out the object[] to avoid boxing
         public string[] RetrieveInterestingTerms(TextReader r, string fieldName)
         {
-            var al = new List<string>(MaxQueryTerms);
+            var al = new JCG.List<string>(MaxQueryTerms);
             PriorityQueue<ScoreTerm> pq = RetrieveTerms(r, fieldName);
             ScoreTerm scoreTerm;
             int lim = MaxQueryTerms; // have to be careful, retrieveTerms returns all words but that's probably not useful to our caller...
diff --git a/src/Lucene.Net.Queries/TermsFilter.cs b/src/Lucene.Net.Queries/TermsFilter.cs
index a9e23ac..dc91c71 100644
--- a/src/Lucene.Net.Queries/TermsFilter.cs
+++ b/src/Lucene.Net.Queries/TermsFilter.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Queries
 {
@@ -171,7 +172,7 @@ namespace Lucene.Net.Queries
             this.offsets = new int[length + 1];
             int lastEndOffset = 0;
             int index = 0;
-            var termsAndFields = new List<TermsAndField>();
+            var termsAndFields = new JCG.List<TermsAndField>();
             TermsAndField lastTermsAndField = null;
             BytesRef previousTerm = null;
             string previousField = null;
diff --git a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
index 220d575..69e408d 100644
--- a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Classic
 {
@@ -109,7 +110,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     Query q = base.GetFieldQuery(m_fields[i], queryText, true);
@@ -152,7 +153,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     Query q = base.GetFieldQuery(m_fields[i], queryText, quoted);
@@ -180,7 +181,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     clauses.Add(new BooleanClause(GetFuzzyQuery(m_fields[i], termStr, minSimilarity), Occur.SHOULD));
@@ -194,7 +195,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     clauses.Add(new BooleanClause(GetPrefixQuery(m_fields[i], termStr), Occur.SHOULD));
@@ -208,7 +209,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     clauses.Add(new BooleanClause(GetWildcardQuery(m_fields[i], termStr), Occur.SHOULD));
@@ -223,7 +224,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     clauses.Add(new BooleanClause(GetRangeQuery(m_fields[i], part1, part2, startInclusive, endInclusive), Occur.SHOULD));
@@ -237,7 +238,7 @@ namespace Lucene.Net.QueryParsers.Classic
         {
             if (field == null)
             {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
+                IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
                 for (int i = 0; i < m_fields.Length; i++)
                 {
                     clauses.Add(new BooleanClause(GetRegexpQuery(m_fields[i], termStr),
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
index a1794f8..a137dae 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -7,6 +7,7 @@ using System.IO;
 #if FEATURE_SERIALIZABLE_EXCEPTIONS
 using System.Runtime.Serialization;
 #endif
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Classic
 {
@@ -212,7 +213,7 @@ namespace Lucene.Net.QueryParsers.Classic
 
         public Query Query(string field)
         {
-            List<BooleanClause> clauses = new List<BooleanClause>();
+            IList<BooleanClause> clauses = new JCG.List<BooleanClause>();
             Query q, firstQuery = null;
             int conj, mods;
             mods = Modifiers();
@@ -802,7 +803,7 @@ namespace Lucene.Net.QueryParsers.Classic
                 return (jj_ntk = Jj_nt.Kind);
         }
 
-        private readonly List<int[]> jj_expentries = new List<int[]>(); // LUCENENET: marked readonly
+        private readonly IList<int[]> jj_expentries = new JCG.List<int[]>(); // LUCENENET: marked readonly
         private int[] jj_expentry;
         private int jj_kind = -1;
         private readonly int[] jj_lasttokens = new int[100]; // LUCENENET: marked readonly
diff --git a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
index 6ec5252..94cbd04 100644
--- a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Search.Spans;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.ComplexPhrase
 {
@@ -54,7 +55,7 @@ namespace Lucene.Net.QueryParsers.ComplexPhrase
     /// </summary>
     public class ComplexPhraseQueryParser : QueryParser
     {
-        private List<ComplexPhraseQuery> complexPhrases = null;
+        private IList<ComplexPhraseQuery> complexPhrases = null;
 
         private bool isPass2ResolvingPhrases;
 
@@ -110,7 +111,7 @@ namespace Lucene.Net.QueryParsers.ComplexPhrase
 
             // First pass - parse the top-level query recording any PhraseQuerys
             // which will need to be resolved
-            complexPhrases = new List<ComplexPhraseQuery>();
+            complexPhrases = new JCG.List<ComplexPhraseQuery>();
             Query q = base.Parse(query);
 
             // Perform second pass, using this QueryParser to parse any nested
@@ -287,7 +288,7 @@ namespace Lucene.Net.QueryParsers.ComplexPhrase
 
                     if (qc is BooleanQuery booleanQuery)
                     {
-                        List<SpanQuery> sc = new List<SpanQuery>();
+                        IList<SpanQuery> sc = new JCG.List<SpanQuery>();
                         AddComplexPhraseClause(sc, booleanQuery);
                         if (sc.Count > 0)
                         {
@@ -326,7 +327,7 @@ namespace Lucene.Net.QueryParsers.ComplexPhrase
                 // Complex case - we have mixed positives and negatives in the
                 // sequence.
                 // Need to return a SpanNotQuery
-                List<SpanQuery> positiveClauses = new List<SpanQuery>();
+                JCG.List<SpanQuery> positiveClauses = new JCG.List<SpanQuery>();
                 for (int j = 0; j < allSpanClauses.Length; j++)
                 {
                     if (!bclauses[j].Occur.Equals(Occur.MUST_NOT))
@@ -359,8 +360,8 @@ namespace Lucene.Net.QueryParsers.ComplexPhrase
 
             private void AddComplexPhraseClause(IList<SpanQuery> spanClauses, BooleanQuery qc)
             {
-                List<SpanQuery> ors = new List<SpanQuery>();
-                List<SpanQuery> nots = new List<SpanQuery>();
+                JCG.List<SpanQuery> ors = new JCG.List<SpanQuery>();
+                JCG.List<SpanQuery> nots = new JCG.List<SpanQuery>();
                 BooleanClause[] bclauses = qc.GetClauses();
 
                 // For all clauses e.g. one* two~
@@ -369,7 +370,7 @@ namespace Lucene.Net.QueryParsers.ComplexPhrase
                     Query childQuery = bclauses[i].Query;
 
                     // select the list to which we will add these options
-                    List<SpanQuery> chosenList = ors;
+                    IList<SpanQuery> chosenList = ors;
                     if (bclauses[i].Occur == Occur.MUST_NOT)
                     {
                         chosenList = nots;
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs
index 9504e0b..cb3a181 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs
@@ -1,6 +1,7 @@
 using Lucene.Net.QueryParsers.Flexible.Core.Messages;
 using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 {
@@ -74,7 +75,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 
         public virtual void SetChild(IQueryNode child)
         {
-            List<IQueryNode> list = new List<IQueryNode>
+            IList<IQueryNode> list = new JCG.List<IQueryNode>
             {
                 child
             };
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs
index 7b22dcb..6fc5160 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs
@@ -1,6 +1,7 @@
 using Lucene.Net.QueryParsers.Flexible.Core.Messages;
 using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 {
@@ -105,7 +106,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 
         public virtual void SetChild(IQueryNode child)
         {
-            List<IQueryNode> list = new List<IQueryNode>
+            IList<IQueryNode> list = new JCG.List<IQueryNode>
             {
                 child
             };
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs
index efc47f8..73212e0 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PathQueryNode.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
 {
@@ -156,7 +157,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
         /// <returns>a List QueryText element from position <paramref name="startIndex"/></returns>
         public virtual IList<QueryText> GetPathElements(int startIndex)
         {
-            List<PathQueryNode.QueryText> rValues = new List<PathQueryNode.QueryText>();
+            IList<PathQueryNode.QueryText> rValues = new JCG.List<PathQueryNode.QueryText>();
             for (int i = startIndex; i < this.values.Count; i++)
             {
                 rValues.Add((QueryText)this.values[i].Clone());
@@ -205,7 +206,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Nodes
             // copy children
             if (this.values != null)
             {
-                List<QueryText> localValues = new List<QueryText>();
+                IList<QueryText> localValues = new JCG.List<QueryText>();
                 foreach (QueryText value in this.values)
                 {
                     localValues.Add((QueryText)value.Clone());
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs
index c7483f0..6eb62f7 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Processors
 {
@@ -67,7 +68,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Processors
     /// <seealso cref="IQueryNodeProcessor"/>
     public abstract class QueryNodeProcessor : IQueryNodeProcessor
     {
-        private readonly List<ChildrenList> childrenListPool = new List<ChildrenList>(); // LUCENENET: marked readonly
+        private readonly IList<ChildrenList> childrenListPool = new JCG.List<ChildrenList>(); // LUCENENET: marked readonly
 
         private QueryConfigHandler queryConfig;
 
@@ -212,7 +213,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Processors
         /// <exception cref="QueryNodeException">if something goes wrong during the query node processing</exception>
         protected abstract IList<IQueryNode> SetChildrenOrder(IList<IQueryNode> children);
 
-        private class ChildrenList : List<IQueryNode>
+        private class ChildrenList : JCG.List<IQueryNode>
         {
             internal bool beingUsed;
         }
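
ChildrenList above now derives from JCG.List<IQueryNode>, showing that the J2N list can be subclassed just as the BCL list was; the pooling flag it carries is unrelated to the type swap. A bare-bones sketch of that subclass-with-a-flag pattern, with hypothetical names:

    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    // A list subtype that carries one extra piece of bookkeeping, like ChildrenList above.
    internal class PooledList<T> : JCG.List<T>
    {
        internal bool InUse;
    }

    internal static class ListPool
    {
        // Hand out the first idle list, or grow the pool when every list is in use.
        internal static PooledList<int> Rent(IList<PooledList<int>> pool)
        {
            foreach (PooledList<int> list in pool)
            {
                if (!list.InUse)
                {
                    list.InUse = true;
                    list.Clear();
                    return list;
                }
            }

            var fresh = new PooledList<int> { InUse = true };
            pool.Add(fresh);
            return fresh;
        }
    }
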
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs
index 768a4cf..79439ba 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs
@@ -1,6 +1,6 @@
 using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Core.Util
 {
@@ -66,7 +66,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Util
             switch (op)
             {
                 case ANDOperation.NONE:
-                    List<IQueryNode> children = new List<IQueryNode>
+                    IList<IQueryNode> children = new JCG.List<IQueryNode>
                     {
                         q1.CloneTree(),
                         q2.CloneTree()
diff --git a/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs
index 87b9e4b..cb54cbb 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs
@@ -3,6 +3,7 @@ using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Operator = Lucene.Net.QueryParsers.Flexible.Standard.Config.StandardQueryConfigHandler.Operator;
 
 namespace Lucene.Net.QueryParsers.Flexible.Precedence.Processors
@@ -37,7 +38,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Precedence.Processors
     /// <seealso cref="ConfigurationKeys.DEFAULT_OPERATOR"/>
     public class BooleanModifiersQueryNodeProcessor : QueryNodeProcessor
     {
-        private readonly List<IQueryNode> childrenBuffer = new List<IQueryNode>(); // LUCENENET: marked readonly
+        private readonly IList<IQueryNode> childrenBuffer = new JCG.List<IQueryNode>(); // LUCENENET: marked readonly
 
         private bool usingAnd = false;
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
index 7d808a6..e2485a8 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
 
             if (children != null)
             {
-                IDictionary<int?, List<Term>> positionTermMap = new JCG.SortedDictionary<int?, List<Term>>();
+                IDictionary<int?, JCG.List<Term>> positionTermMap = new JCG.SortedDictionary<int?, JCG.List<Term>>();
 
                 foreach (IQueryNode child in children)
                 {
@@ -54,9 +54,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
                     TermQuery termQuery = (TermQuery)termNode
                         .GetTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
 
-                    if (!positionTermMap.TryGetValue(termNode.PositionIncrement, out List<Term> termList) || termList == null)
+                    if (!positionTermMap.TryGetValue(termNode.PositionIncrement, out JCG.List<Term> termList) || termList == null)
                     {
-                        termList = new List<Term>();
+                        termList = new JCG.List<Term>();
                         positionTermMap[termNode.PositionIncrement] = termList;
                     }
 
@@ -65,7 +65,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
 
                 foreach (int positionIncrement in positionTermMap.Keys)
                 {
-                    List<Term> termList = positionTermMap[positionIncrement];
+                    JCG.List<Term> termList = positionTermMap[positionIncrement];
 
                     phraseQuery.Add(termList.ToArray(/*new Term[termList.size()]*/),
                                 positionIncrement);
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs
index d541ba3..fc1f387 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs
@@ -4,6 +4,7 @@ using Lucene.Net.QueryParsers.Flexible.Core.Util;
 using System;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
 {
@@ -140,7 +141,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
                 this.lowerInclusive = lowerInclusive;
                 this.upperInclusive = upperInclusive;
 
-                List<IQueryNode> children = new List<IQueryNode>(2)
+                IList<IQueryNode> children = new JCG.List<IQueryNode>(2)
                 {
                     lower,
                     upper
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
index ca61adb..ba5bc66 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
@@ -11,6 +11,7 @@ using System.IO;
 #if FEATURE_SERIALIZABLE_EXCEPTIONS
 using System.Runtime.Serialization;
 #endif
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
 {
@@ -172,7 +173,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
         //      Clause ::= [ Modifier ] ... 
         public IQueryNode Query(string field)
         {
-            List<IQueryNode> clauses = null;
+            IList<IQueryNode> clauses = null;
             IQueryNode c, first = null;
             first = DisjQuery(field);
             
@@ -199,7 +200,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                 c = DisjQuery(field);
                 if (clauses == null)
                 {
-                    clauses = new List<IQueryNode>();
+                    clauses = new JCG.List<IQueryNode>();
                     clauses.Add(first);
                 }
                 clauses.Add(c);
@@ -219,7 +220,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
         public IQueryNode DisjQuery(string field)
         {
             IQueryNode first, c;
-            List<IQueryNode> clauses = null;
+            IList<IQueryNode> clauses = null;
             first = ConjQuery(field);
             
             while (true)
@@ -237,7 +238,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                 c = ConjQuery(field);
                 if (clauses == null)
                 {
-                    clauses = new List<IQueryNode>();
+                    clauses = new JCG.List<IQueryNode>();
                     clauses.Add(first);
                 }
                 clauses.Add(c);
@@ -257,7 +258,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
         public IQueryNode ConjQuery(string field)
         {
             IQueryNode first, c;
-            List<IQueryNode> clauses = null;
+            IList<IQueryNode> clauses = null;
             first = ModClause(field);
             
             while (true)
@@ -275,7 +276,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                 c = ModClause(field);
                 if (clauses == null)
                 {
-                    clauses = new List<IQueryNode>();
+                    clauses = new JCG.List<IQueryNode>();
                     clauses.Add(first);
                 }
                 clauses.Add(c);
@@ -1155,7 +1156,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                 return (jj_ntk = Jj_nt.Kind);
         }
 
-        private readonly List<int[]> jj_expentries = new List<int[]>(); // LUCENENET: marked readonly
+        private readonly IList<int[]> jj_expentries = new JCG.List<int[]>(); // LUCENENET: marked readonly
         private int[] jj_expentry;
         private int jj_kind = -1;
         private readonly int[] jj_lasttokens = new int[100]; // LUCENENET: marked readonly
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
index 9494ac5..65c48e4 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
@@ -10,8 +10,7 @@ using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
-using System.IO;
+using JCG = J2N.Collections.Generic;
 using Operator = Lucene.Net.QueryParsers.Flexible.Standard.Config.StandardQueryConfigHandler.Operator;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
@@ -201,7 +200,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                         if (positionCount == 1)
                         {
                             // simple case: only one position, with synonyms
-                            List<IQueryNode> children = new List<IQueryNode>();
+                            IList<IQueryNode> children = new JCG.List<IQueryNode>();
 
                             for (int i = 0; i < numTokens; i++)
                             {
@@ -288,7 +287,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                         // phrase query:
                         MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();
 
-                        List<FieldQueryNode> multiTerms = new List<FieldQueryNode>();
+                        IList<FieldQueryNode> multiTerms = new JCG.List<FieldQueryNode>();
                         int position = -1;
                         int i = 0;
                         int termGroupCount = 0;
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs
index 43d85a3..06dfe0e 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs
@@ -5,6 +5,7 @@ using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Operator = Lucene.Net.QueryParsers.Flexible.Standard.Config.StandardQueryConfigHandler.Operator;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
@@ -55,7 +56,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
 
         private QueryConfigHandler queryConfigHandler;
 
-        private readonly List<IQueryNode> childrenBuffer = new List<IQueryNode>();
+        private readonly IList<IQueryNode> childrenBuffer = new JCG.List<IQueryNode>();
 
         private bool usingAnd = false;
 
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs
index e806732..b130b92 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs
@@ -5,6 +5,7 @@ using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Operator = Lucene.Net.QueryParsers.Flexible.Standard.Config.StandardQueryConfigHandler.Operator;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
@@ -42,7 +43,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
     [Obsolete("Use BooleanQuery2ModifierNodeProcessor instead")]
     public class GroupQueryNodeProcessor : IQueryNodeProcessor
     {
-        private List<IQueryNode> queryNodeList;
+        private IList<IQueryNode> queryNodeList;
 
         private bool latestNodeVerified;
 
@@ -72,11 +73,11 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                 queryTree = groupQueryNode.GetChild();
             }
 
-            this.queryNodeList = new List<IQueryNode>();
+            this.queryNodeList = new JCG.List<IQueryNode>();
             this.latestNodeVerified = false;
             ReadTree(queryTree);
 
-            List<IQueryNode> actualQueryNodeList = this.queryNodeList;
+            IList<IQueryNode> actualQueryNodeList = this.queryNodeList;
 
             for (int i = 0; i < actualQueryNodeList.Count; i++)
             {
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs
index 76c5bd1..3c871c9 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs
@@ -3,6 +3,7 @@ using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.QueryParsers.Flexible.Standard.Config;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
 {
@@ -87,7 +88,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
                         }
                         else
                         {
-                            List<IQueryNode> children = new List<IQueryNode>
+                            IList<IQueryNode> children = new JCG.List<IQueryNode>
                             {
                                 fieldNode
                             };
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs
index 1f4bc46..a510350 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.QueryParsers.Flexible.Core.Processors;
 using Lucene.Net.Util;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
 {
@@ -35,7 +36,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
     /// <seealso cref="MatchNoDocsQueryNode"/>
     public class RemoveEmptyNonLeafQueryNodeProcessor : QueryNodeProcessor
     {
-        private readonly List<IQueryNode> childrenBuffer = new List<IQueryNode>(); // LUCENENET: marked readonly
+        private readonly JCG.List<IQueryNode> childrenBuffer = new JCG.List<IQueryNode>(); // LUCENENET: marked readonly
 
         public RemoveEmptyNonLeafQueryNodeProcessor()
         {
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
index 4c24a99..5c91ac3 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -6,6 +6,7 @@ using System.IO;
 #if FEATURE_SERIALIZABLE_EXCEPTIONS
 using System.Runtime.Serialization;
 #endif
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Surround.Parser
 {
@@ -228,7 +229,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                 Jj_consume_token(RegexpToken.COLON);
                 if (fieldNames == null)
                 {
-                    fieldNames = new List<string>();
+                    fieldNames = new JCG.List<string>();
                 }
                 fieldNames.Add(fieldName.Image);
             }
@@ -259,7 +260,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                 /* keep only last used operator */
                 if (queries == null)
                 {
-                    queries = new List<SrndQuery>();
+                    queries = new JCG.List<SrndQuery>();
                     queries.Add(q);
                 }
                 q = AndQuery();
@@ -292,7 +293,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                 /* keep only last used operator */
                 if (queries == null)
                 {
-                    queries = new List<SrndQuery>();
+                    queries = new JCG.List<SrndQuery>();
                     queries.Add(q);
                 }
                 q = NotQuery();
@@ -325,7 +326,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                 /* keep only last used operator */
                 if (queries == null)
                 {
-                    queries = new List<SrndQuery>();
+                    queries = new JCG.List<SrndQuery>();
                     queries.Add(q);
                 }
                 q = NQuery();
@@ -355,7 +356,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                         goto label_5;
                 }
                 dt = Jj_consume_token(RegexpToken.N);
-                queries = new List<SrndQuery>();
+                queries = new JCG.List<SrndQuery>();
                 queries.Add(q); /* left associative */
 
                 q = WQuery();
@@ -386,7 +387,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                         goto label_6;
                 }
                 wt = Jj_consume_token(RegexpToken.W);
-                queries = new List<SrndQuery>();
+                queries = new JCG.List<SrndQuery>();
                 queries.Add(q); /* left associative */
 
                 q = PrimaryQuery();
@@ -473,7 +474,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
         public IList<SrndQuery> FieldsQueryList()
         {
             SrndQuery q;
-            IList<SrndQuery> queries = new List<SrndQuery>();
+            IList<SrndQuery> queries = new JCG.List<SrndQuery>();
             Jj_consume_token(RegexpToken.LPAREN);
             q = FieldsQuery();
             queries.Add(q);
@@ -783,7 +784,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                 return (jj_ntk = Jj_nt.Kind);
         }
 
-        private readonly IList<int[]> jj_expentries = new List<int[]>(); // LUCENENET: marked readonly
+        private readonly IList<int[]> jj_expentries = new JCG.List<int[]>(); // LUCENENET: marked readonly
         private int[] jj_expentry;
         private int jj_kind = -1;
         private readonly int[] jj_lasttokens = new int[100]; // LUCENENET: marked readonly
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
index d263996..8bd5658 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query
         protected virtual void Recompose(IList<SrndQuery> queries)
         {
             if (queries.Count < 2) throw AssertionError.Create("Too few subqueries");
-            this.m_queries = new List<SrndQuery>(queries);
+            this.m_queries = new JCG.List<SrndQuery>(queries);
         }
 
         protected string m_opName;
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
index 16f8ee5..259be21 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
@@ -1,5 +1,6 @@
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Surround.Query
 {
@@ -40,7 +41,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query
         public FieldsQuery(SrndQuery q, string fieldName, char fieldOp)
         {
             this.q = q;
-            var fieldNameList = new List<string>
+            var fieldNameList = new JCG.List<string>
             {
                 fieldName
             };
@@ -58,7 +59,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query
             }
             else
             { /* OR query over the fields */
-                List<SrndQuery> queries = new List<SrndQuery>();
+                IList<SrndQuery> queries = new JCG.List<SrndQuery>();
                 foreach (var fieldName in fieldNames)
                 {
                     var qc = (SrndQuery)q.Clone();
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
index c15153c..f771993 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
@@ -1,6 +1,7 @@
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Surround.Query
 {
@@ -33,7 +34,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query
 
         public override Search.Query Rewrite(IndexReader reader)
         {
-            var luceneSubQueries = new List<Search.Query>();
+            var luceneSubQueries = new JCG.List<Search.Query>();
             m_srndQuery.VisitMatchingTerms(reader, m_fieldName, 
                 new SimpleTermRewriteMatchingTermVisitor(luceneSubQueries, m_qf));
             return (luceneSubQueries.Count == 0) ? SrndQuery.TheEmptyLcnQuery
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
index 98d8e81..eac847f 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
@@ -112,7 +112,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query
 
         public virtual SpanQuery MakeSpanClause()
         {
-            List<SpanQuery> spanQueries = new List<SpanQuery>();
+            JCG.List<SpanQuery> spanQueries = new JCG.List<SpanQuery>();
             foreach (var wsq in weightBySpanQuery)
             {
                 wsq.Key.Boost = wsq.Value;
diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/SpanNearBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/SpanNearBuilder.cs
index 7d84d46..0a2b55b 100644
--- a/src/Lucene.Net.QueryParser/Xml/Builders/SpanNearBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Xml/Builders/SpanNearBuilder.cs
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.Globalization;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Xml.Builders
 {
@@ -39,7 +40,7 @@ namespace Lucene.Net.QueryParsers.Xml.Builders
             string slopString = DOMUtils.GetAttributeOrFail(e, "slop");
             int slop = int.Parse(slopString, CultureInfo.InvariantCulture);
             bool inOrder = DOMUtils.GetAttribute(e, "inOrder", false);
-            List<SpanQuery> spans = new List<SpanQuery>();
+            JCG.List<SpanQuery> spans = new JCG.List<SpanQuery>();
             for (XmlNode kid = e.FirstChild; kid != null; kid = kid.NextSibling)
             {
                 if (kid.NodeType == XmlNodeType.Element)
diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrBuilder.cs
index bfb2986..e952726 100644
--- a/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrBuilder.cs
@@ -1,6 +1,6 @@
 using Lucene.Net.Search.Spans;
-using System.Collections.Generic;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Xml.Builders
 {
@@ -35,7 +35,7 @@ namespace Lucene.Net.QueryParsers.Xml.Builders
 
         public override SpanQuery GetSpanQuery(XmlElement e)
         {
-            List<SpanQuery> clausesList = new List<SpanQuery>();
+            JCG.List<SpanQuery> clausesList = new JCG.List<SpanQuery>();
             for (XmlNode kid = e.FirstChild; kid != null; kid = kid.NextSibling)
             {
                 if (kid.NodeType == XmlNodeType.Element)
diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrTermsBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrTermsBuilder.cs
index 086df81..3e71751 100644
--- a/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrTermsBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Xml/Builders/SpanOrTermsBuilder.cs
@@ -4,9 +4,8 @@ using Lucene.Net.Index;
 using Lucene.Net.Search.Spans;
 using Lucene.Net.Util;
 using System;
-using System.Collections.Generic;
-using System.IO;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Xml.Builders
 {
@@ -44,7 +43,7 @@ namespace Lucene.Net.QueryParsers.Xml.Builders
             string fieldName = DOMUtils.GetAttributeWithInheritanceOrFail(e, "fieldName");
             string value = DOMUtils.GetNonBlankTextOrFail(e);
 
-            List<SpanQuery> clausesList = new List<SpanQuery>();
+            JCG.List<SpanQuery> clausesList = new JCG.List<SpanQuery>();
 
             TokenStream ts = null;
             try
diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/TermsFilterBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/TermsFilterBuilder.cs
index 07ac660..7ee0b46 100644
--- a/src/Lucene.Net.QueryParser/Xml/Builders/TermsFilterBuilder.cs
+++ b/src/Lucene.Net.QueryParser/Xml/Builders/TermsFilterBuilder.cs
@@ -5,8 +5,8 @@ using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.IO;
 using System.Xml;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.QueryParsers.Xml.Builders
 {
@@ -45,7 +45,7 @@ namespace Lucene.Net.QueryParsers.Xml.Builders
         /// </summary>
         public virtual Filter GetFilter(XmlElement e)
         {
-            List<BytesRef> terms = new List<BytesRef>();
+            IList<BytesRef> terms = new JCG.List<BytesRef>();
             string text = DOMUtils.GetNonBlankTextOrFail(e);
             string fieldName = DOMUtils.GetAttributeWithInheritanceOrFail(e, "fieldName");
 
diff --git a/src/Lucene.Net.Replicator/IndexRevision.cs b/src/Lucene.Net.Replicator/IndexRevision.cs
index 166a84b..b3f33d0 100644
--- a/src/Lucene.Net.Replicator/IndexRevision.cs
+++ b/src/Lucene.Net.Replicator/IndexRevision.cs
@@ -1,13 +1,13 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using Directory = Lucene.Net.Store.Directory;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Replicator
 {
@@ -66,7 +66,7 @@ namespace Lucene.Net.Replicator
         public static IDictionary<string, IList<RevisionFile>> RevisionFiles(IndexCommit commit)
         {
             ICollection<string> commitFiles = commit.FileNames;
-            List<RevisionFile> revisionFiles = new List<RevisionFile>(commitFiles.Count);
+            IList<RevisionFile> revisionFiles = new JCG.List<RevisionFile>(commitFiles.Count);
             string segmentsFile = commit.SegmentsFileName;
             Directory dir = commit.Directory;
 
diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs
index 9d05757..be8842b 100644
--- a/src/Lucene.Net.Replicator/ReplicationClient.cs
+++ b/src/Lucene.Net.Replicator/ReplicationClient.cs
@@ -253,7 +253,7 @@ namespace Lucene.Net.Replicator
                     Directory directory = factory.GetDirectory(session.Id, source);
 
                     sourceDirectory.Add(source, directory);
-                    List<string> cpFiles = new List<string>();
+                    IList<string> cpFiles = new JCG.List<string>();
                     copiedFiles.Add(source, cpFiles);
                     foreach (RevisionFile file in pair.Value)
                     {
@@ -380,7 +380,7 @@ namespace Lucene.Net.Replicator
                 }
 
                 // make sure to preserve revisionFiles order
-                List<RevisionFile> res = new List<RevisionFile>();
+                IList<RevisionFile> res = new JCG.List<RevisionFile>();
                 string source = e.Key;
                 if (Debugging.AssertsEnabled) Debugging.Assert(newRevisionFiles.ContainsKey(source), "source not found in newRevisionFiles: {0}", newRevisionFiles);
                 foreach (RevisionFile file in newRevisionFiles[source])
diff --git a/src/Lucene.Net.Replicator/SessionToken.cs b/src/Lucene.Net.Replicator/SessionToken.cs
index 027dbea..ee04a38 100644
--- a/src/Lucene.Net.Replicator/SessionToken.cs
+++ b/src/Lucene.Net.Replicator/SessionToken.cs
@@ -1,6 +1,7 @@
-using J2N.IO;
+using J2N.IO;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Replicator
 {
@@ -67,7 +68,7 @@ namespace Lucene.Net.Replicator
                 string source = reader.ReadUTF();
                 int numFiles = reader.ReadInt32();
 
-                List<RevisionFile> files = new List<RevisionFile>(numFiles);
+                IList<RevisionFile> files = new JCG.List<RevisionFile>(numFiles);
                 for (int i = 0; i < numFiles; i++)
                 {
                     files.Add(new RevisionFile(reader.ReadUTF(), reader.ReadInt64()));
diff --git a/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs b/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs
index 93ac28f..4607c43 100644
--- a/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs
+++ b/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs
@@ -291,22 +291,20 @@ namespace Lucene.Net.Sandbox.Queries
             //create BooleanQueries to hold the variants for each token/field pair and ensure it
             // has no coord factor
             //Step 1: sort the termqueries by term/field
-            IDictionary<Term, List<ScoreTerm>> variantQueries = new Dictionary<Term, List<ScoreTerm>>();
+            IDictionary<Term, IList<ScoreTerm>> variantQueries = new Dictionary<Term, IList<ScoreTerm>>();
             int size = q.Count;
             for (int i = 0; i < size; i++)
             {
                 ScoreTerm st = q.Pop();
-                //List<ScoreTerm> l = variantQueries.get(st.fuzziedSourceTerm);
-                //          if(l==null)
-                if (!variantQueries.TryGetValue(st.FuzziedSourceTerm, out List<ScoreTerm> l) || l == null)
+                if (!variantQueries.TryGetValue(st.FuzziedSourceTerm, out IList<ScoreTerm> l) || l == null)
                 {
-                    l = new List<ScoreTerm>();
+                    l = new JCG.List<ScoreTerm>();
                     variantQueries[st.FuzziedSourceTerm] = l;
                 }
                 l.Add(st);
             }
             //Step 2: Organize the sorted termqueries into zero-coord scoring boolean queries
-            foreach (List<ScoreTerm> variants in variantQueries.Values)
+            foreach (IList<ScoreTerm> variants in variantQueries.Values)
             {
                 if (variants.Count == 1)
                 {
diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs
index 068542e..43ebffe 100644
--- a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs
+++ b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs
@@ -1,10 +1,10 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Spatial4n.Core.Shapes;
 using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
-using System.Diagnostics;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Prefix.Tree
 {
@@ -207,7 +207,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
                 return cells;
             }
             //TODO change API to return a filtering iterator
-            IList<Cell> copy = new List<Cell>(cells.Count);
+            IList<Cell> copy = new JCG.List<Cell>(cells.Count);
             foreach (Cell cell in cells)
             {
                 SpatialRelation rel = cell.Shape.Relate(shapeFilter);
diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
index ea315eb..15a0796 100644
--- a/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
+++ b/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
@@ -3,6 +3,7 @@ using Spatial4n.Core.Shapes;
 using Spatial4n.Core.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Prefix.Tree
 {
@@ -122,7 +123,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
             protected internal override ICollection<Cell> GetSubCells()
             {
                 string[] hashes = GeohashUtils.GetSubGeohashes(Geohash);//sorted
-                IList<Cell> cells = new List<Cell>(hashes.Length);
+                IList<Cell> cells = new JCG.List<Cell>(hashes.Length);
                 foreach (string hash in hashes)
                 {
                     cells.Add(new GhCell((GeohashPrefixTree)m_outerInstance, hash));
diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs
index bc35b9f..6531152 100644
--- a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs
+++ b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs
@@ -1,11 +1,10 @@
 using Lucene.Net.Diagnostics;
 using Spatial4n.Core.Context;
 using Spatial4n.Core.Shapes;
-using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Prefix.Tree
 {
@@ -146,7 +145,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
 
         protected internal override Cell GetCell(IPoint p, int level)
         {
-            IList<Cell> cells = new List<Cell>(1);
+            IList<Cell> cells = new JCG.List<Cell>(1);
             Build(xmid, ymid, 0, cells, new StringBuilder(), m_ctx.MakePoint(p.X, p.Y), level);
             return cells[0];
         }
@@ -261,7 +260,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
             protected internal override ICollection<Cell> GetSubCells()
             {
                 QuadPrefixTree outerInstance = (QuadPrefixTree)this.m_outerInstance;
-                return new List<Cell>(4)
+                return new JCG.List<Cell>(4)
                 {
                     new QuadCell(outerInstance, TokenString + "A"),
                     new QuadCell(outerInstance, TokenString + "B"),
diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
index 655a6c0..99cafb7 100644
--- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
+++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
@@ -4,7 +4,7 @@ using Spatial4n.Core.Shapes;
 using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
-using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Prefix.Tree
 {
@@ -193,7 +193,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
             {
                 return GetCells(point, detailLevel, inclParents);
             }
-            IList<Cell> cells = new List<Cell>(inclParents ? 4096 : 2048);
+            IList<Cell> cells = new JCG.List<Cell>(inclParents ? 4096 : 2048);
             RecursiveGetCells(WorldCell, shape, detailLevel, inclParents, simplify, cells);
             return cells;
         }
@@ -270,7 +270,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
             }
             string endToken = cell.TokenString;
             if (Debugging.AssertsEnabled) Debugging.Assert(endToken.Length == detailLevel);
-            IList<Cell> cells = new List<Cell>(detailLevel);
+            IList<Cell> cells = new JCG.List<Cell>(detailLevel);
             for (int i = 1; i < detailLevel; i++)
             {
                 cells.Add(GetCell(endToken.Substring(0, i - 0)));
@@ -283,7 +283,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
         [Obsolete("TODO remove; not used and not interesting, don't need collection in & out")]
         public static IList<string> CellsToTokenStrings(ICollection<Cell> cells)
         {
-            IList<string> tokens = new List<string>((cells.Count));
+            IList<string> tokens = new JCG.List<string>((cells.Count));
             foreach (Cell cell in cells)
             {
                 string token = cell.TokenString;
diff --git a/src/Lucene.Net.Spatial/Query/SpatialOperation.cs b/src/Lucene.Net.Spatial/Query/SpatialOperation.cs
index 760ae4e..d9e9f04 100644
--- a/src/Lucene.Net.Spatial/Query/SpatialOperation.cs
+++ b/src/Lucene.Net.Spatial/Query/SpatialOperation.cs
@@ -2,6 +2,7 @@
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Queries
 {
@@ -39,7 +40,7 @@ namespace Lucene.Net.Spatial.Queries
     {
         // Private registry
         private static readonly IDictionary<string, SpatialOperation> registry = new Dictionary<string, SpatialOperation>();
-        private static readonly IList<SpatialOperation> list = new List<SpatialOperation>();
+        private static readonly IList<SpatialOperation> list = new JCG.List<SpatialOperation>();
 
         // Geometry Operations
 
diff --git a/src/Lucene.Net.Spatial/Util/ShapeFieldCache.cs b/src/Lucene.Net.Spatial/Util/ShapeFieldCache.cs
index 33f6f50..d7d4566 100644
--- a/src/Lucene.Net.Spatial/Util/ShapeFieldCache.cs
+++ b/src/Lucene.Net.Spatial/Util/ShapeFieldCache.cs
@@ -1,5 +1,6 @@
 using Spatial4n.Core.Shapes;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial.Util
 {
@@ -46,7 +47,7 @@ namespace Lucene.Net.Spatial.Util
             IList<T> list = cache[docid];
             if (list == null)
             {
-                list = cache[docid] = new List<T>(DefaultLength);
+                list = cache[docid] = new JCG.List<T>(DefaultLength);
             }
             list.Add(s);
         }
diff --git a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
index 23c4e39..8d183f9 100644
--- a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
+++ b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
@@ -7,6 +7,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Runtime.CompilerServices;
 using Directory = Lucene.Net.Store.Directory;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Spell
 {
@@ -504,7 +505,7 @@ namespace Lucene.Net.Search.Spell
                 using (var writer = new IndexWriter(dir, config))
                 {
                     IndexSearcher indexSearcher = ObtainSearcher();
-                    IList<TermsEnum> termsEnums = new List<TermsEnum>();
+                    IList<TermsEnum> termsEnums = new JCG.List<TermsEnum>();
 
                     IndexReader reader = searcher.IndexReader;
                     if (reader.MaxDoc > 0)
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
index 3a084a3..7bda694 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
@@ -620,7 +620,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             // TODO: maybe just stored fields?  they compress...
             BinaryDocValues payloadsDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, "payloads");
             IList<AtomicReaderContext> leaves = searcher.IndexReader.Leaves;
-            List<LookupResult> results = new List<LookupResult>();
+            IList<LookupResult> results = new JCG.List<LookupResult>();
             BytesRef scratch = new BytesRef();
             for (int i = 0; i < hits.ScoreDocs.Length; i++)
             {
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
index 2f2e754..fe33cca 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
@@ -267,7 +267,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             for (int stateNumber = states.Length - 1; stateNumber >= 0; stateNumber--)
             {
                 State state = states[stateNumber];
-                IList<Transition> newTransitions = new List<Transition>();
+                IList<Transition> newTransitions = new JCG.List<Transition>();
                 foreach (Transition t in state.GetTransitions())
                 {
                     if (Debugging.AssertsEnabled) Debugging.Assert(t.Min == t.Max);
@@ -769,7 +769,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
                 var scratchArc = new FST.Arc<PairOutputs<long?, BytesRef>.Pair>();
 
-                IList<LookupResult> results = new List<LookupResult>();
+                IList<LookupResult> results = new JCG.List<LookupResult>();
 
                 IList<FSTUtil.Path<PairOutputs<long?, BytesRef>.Pair>> prefixPaths =
                     FSTUtil.IntersectPrefixPaths(ConvertAutomaton(lookupAutomaton), fst);
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs
index df5ea4a..3cf9edf 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs
@@ -217,7 +217,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 BoundedTreeAdd(results, result, actualNum);
             }
 
-            return new List<LookupResult>(results.Reverse());
+            return new JCG.List<LookupResult>(results.Reverse());
         }
 
         /// <summary>
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs
index 5ad3fde..6d084c5 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs
@@ -3,7 +3,7 @@ using Lucene.Net.Util;
 using Lucene.Net.Util.Automaton;
 using Lucene.Net.Util.Fst;
 using System.Collections.Generic;
-using System.Diagnostics;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Analyzing
 {
@@ -70,8 +70,8 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         public static IList<Path<T>> IntersectPrefixPaths<T>(Automaton a, FST<T> fst)
         {
             if (Debugging.AssertsEnabled) Debugging.Assert(a.IsDeterministic);
-            IList<Path<T>> queue = new List<Path<T>>();
-            List<Path<T>> endNodes = new List<Path<T>>();
+            IList<Path<T>> queue = new JCG.List<Path<T>>();
+            IList<Path<T>> endNodes = new JCG.List<Path<T>>();
             queue.Add(new Path<T>(a.GetInitialState(), fst.GetFirstArc(new FST.Arc<T>()), fst.Outputs.NoOutput, new Int32sRef()));
 
             FST.Arc<T> scratchArc = new FST.Arc<T>();
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
index 347defa..07973e4 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
@@ -617,7 +617,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // results, return that; else, fallback:
                 double backoff = 1.0;
 
-                List<LookupResult> results = new List<LookupResult>(num);
+                JCG.List<LookupResult> results = new JCG.List<LookupResult>(num);
 
                 // We only add a given suffix once, from the highest
                 // order model that saw it; for subsequent lower order
diff --git a/src/Lucene.Net.Suggest/Suggest/BufferedInputIterator.cs b/src/Lucene.Net.Suggest/Suggest/BufferedInputIterator.cs
index 9b30605..2eeaf29 100644
--- a/src/Lucene.Net.Suggest/Suggest/BufferedInputIterator.cs
+++ b/src/Lucene.Net.Suggest/Suggest/BufferedInputIterator.cs
@@ -1,6 +1,7 @@
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest
 {
@@ -37,7 +38,7 @@ namespace Lucene.Net.Search.Suggest
         protected BytesRefArray m_payloads = new BytesRefArray(Counter.NewCounter());
         /// <summary>
         /// buffered context set entries </summary>
-        protected IList<ICollection<BytesRef>> m_contextSets = new List<ICollection<BytesRef>>();
+        protected IList<ICollection<BytesRef>> m_contextSets = new JCG.List<ICollection<BytesRef>>();
         /// <summary>
         /// current buffer position </summary>
         protected int m_curPos = -1;
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
index 1bbd8eb..7eac118 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
@@ -79,7 +79,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// An empty result. Keep this an <see cref="List{T}"/> to keep all the returned
         /// lists of single type (monomorphic calls).
         /// </summary>
-        private static readonly List<Completion> EMPTY_RESULT = new List<Completion>();
+        private static readonly IList<Completion> EMPTY_RESULT = new JCG.List<Completion>();
 
         /// <summary>
         /// Finite state automaton encoding all the lookup terms. See class notes for
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
index afacc7d..8b7a980 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
@@ -7,6 +7,7 @@ using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Fst
 {
@@ -277,7 +278,7 @@ namespace Lucene.Net.Search.Suggest.Fst
                 completions = normalCompletion.DoLookup(key, num);
             }
 
-            List<LookupResult> results = new List<LookupResult>(completions.Count);
+            IList<LookupResult> results = new JCG.List<LookupResult>(completions.Count);
             CharsRef spare = new CharsRef();
             foreach (FSTCompletion.Completion c in completions)
             {
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
index 20bfd19..c916620 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
@@ -5,8 +5,7 @@ using Lucene.Net.Util;
 using Lucene.Net.Util.Fst;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
-using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Fst
 {
@@ -180,7 +179,7 @@ namespace Lucene.Net.Search.Suggest.Fst
                 return Collections.EmptyList<LookupResult>();
             }
 
-            List<LookupResult> results = new List<LookupResult>(num);
+            IList<LookupResult> results = new JCG.List<LookupResult>(num);
             CharsRef spare = new CharsRef();
             if (exactFirst && arc.IsFinal)
             {
diff --git a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellLookup.cs b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellLookup.cs
index 4cbb763..5d9387b 100644
--- a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellLookup.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Jaspell
 {
@@ -117,7 +118,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
             {
                 throw new ArgumentException("this suggester doesn't support contexts");
             }
-            List<LookupResult> res = new List<LookupResult>();
+            IList<LookupResult> res = new JCG.List<LookupResult>();
             IList<string> list;
             int count = onlyMorePopular ? num * 2 : num;
             if (usePrefix)
diff --git a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
index 68b6460..164f97e 100644
--- a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
@@ -33,6 +33,7 @@ using System.Globalization;
 using System.IO;
 using System.IO.Compression;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Jaspell
 {
@@ -698,7 +699,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
         /// <returns> A <see cref="IList{String}"/> with the results </returns>
         public virtual IList<string> MatchAlmost(string key, int numReturnValues)
         {
-            return MatchAlmostRecursion(rootNode, 0, matchAlmostDiff, key, ((numReturnValues < 0) ? -1 : numReturnValues), new List<string>(), false);
+            return MatchAlmostRecursion(rootNode, 0, matchAlmostDiff, key, ((numReturnValues < 0) ? -1 : numReturnValues), new JCG.List<string>(), false);
         }
 
         /// <summary>
@@ -774,7 +775,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
         /// <returns> A <see cref="IList{String}"/> with the results </returns>
         public virtual IList<string> MatchPrefix(string prefix, int numReturnValues)
         {
-            List<string> sortKeysResult = new List<string>();
+            IList<string> sortKeysResult = new JCG.List<string>();
             TSTNode startNode = GetNode(prefix);
             if (startNode == null)
             {
@@ -939,7 +940,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
         /// <returns> A <see cref="IList{String}"/> with the results. </returns>
         protected virtual IList<string> SortKeys(TSTNode startNode, int numReturnValues)
         {
-            return SortKeysRecursion(startNode, ((numReturnValues < 0) ? -1 : numReturnValues), new List<string>());
+            return SortKeysRecursion(startNode, ((numReturnValues < 0) ? -1 : numReturnValues), new JCG.List<string>());
         }
 
         /// <summary>
diff --git a/src/Lucene.Net.Suggest/Suggest/Tst/TSTAutocomplete.cs b/src/Lucene.Net.Suggest/Suggest/Tst/TSTAutocomplete.cs
index 8772597..cc8b039 100644
--- a/src/Lucene.Net.Suggest/Suggest/Tst/TSTAutocomplete.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Tst/TSTAutocomplete.cs
@@ -1,4 +1,5 @@
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Tst
 {
@@ -130,7 +131,7 @@ namespace Lucene.Net.Search.Suggest.Tst
         {
 
             TernaryTreeNode p = root;
-            List<TernaryTreeNode> suggest = new List<TernaryTreeNode>();
+            JCG.List<TernaryTreeNode> suggest = new JCG.List<TernaryTreeNode>();
 
             while (p != null)
             {
diff --git a/src/Lucene.Net.Suggest/Suggest/Tst/TSTLookup.cs b/src/Lucene.Net.Suggest/Suggest/Tst/TSTLookup.cs
index a46c9f1..3f41c5c 100644
--- a/src/Lucene.Net.Suggest/Suggest/Tst/TSTLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Tst/TSTLookup.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Support.Threading;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Tst
 {
@@ -69,8 +70,8 @@ namespace Lucene.Net.Search.Suggest.Tst
             }
 #pragma warning restore 612, 618
 
-            List<string> tokens = new List<string>();
-            List<object> vals = new List<object>();
+            JCG.List<string> tokens = new JCG.List<string>();
+            JCG.List<object> vals = new JCG.List<object>();
             BytesRef spare;
             CharsRef charsSpare = new CharsRef();
             while (enumerator.MoveNext())
@@ -143,7 +144,7 @@ namespace Lucene.Net.Search.Suggest.Tst
                 throw new ArgumentException("this suggester doesn't support contexts");
             }
             IList<TernaryTreeNode> list = autocomplete.PrefixCompletion(root, key, 0);
-            List<LookupResult> res = new List<LookupResult>();
+            IList<LookupResult> res = new JCG.List<LookupResult>();
             if (list == null || list.Count == 0)
             {
                 return res;
diff --git a/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs b/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
index 4e069df..266a5c1 100644
--- a/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
@@ -18,6 +18,7 @@ using Attribute = Lucene.Net.Util.Attribute;
 using AttributeFactory = Lucene.Net.Util.AttributeSource.AttributeFactory;
 using Console = Lucene.Net.Util.SystemConsole;
 using Directory = Lucene.Net.Store.Directory;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis
 {
@@ -1023,12 +1024,12 @@ namespace Lucene.Net.Analysis
             IPositionLengthAttribute posLengthAtt;
             ITypeAttribute typeAtt;
 
-            IList<string> tokens = new List<string>();
-            IList<string> types = new List<string>();
-            IList<int> positions = new List<int>();
-            IList<int> positionLengths = new List<int>();
-            IList<int> startOffsets = new List<int>();
-            IList<int> endOffsets = new List<int>();
+            IList<string> tokens = new JCG.List<string>();
+            IList<string> types = new JCG.List<string>();
+            IList<int> positions = new JCG.List<int>();
+            IList<int> positionLengths = new JCG.List<int>();
+            IList<int> startOffsets = new JCG.List<int>();
+            IList<int> endOffsets = new JCG.List<int>();
 
             int remainder = random.Next(10);
             TextReader reader = new StringReader(text);
diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
index d5d0fb7..3eb4bad 100644
--- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
@@ -1,8 +1,9 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Util;
 using System.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis
 {
@@ -52,7 +53,7 @@ namespace Lucene.Net.Analysis
         public class Position : RollingBuffer.IResettable
         {
             // Buffered input tokens at this position:
-            public IList<AttributeSource.State> InputTokens { get; private set; } = new List<AttributeSource.State>();
+            public IList<AttributeSource.State> InputTokens { get; private set; } = new JCG.List<AttributeSource.State>();
 
             // Next buffered token to be returned to consumer:
             public int NextRead { get; set; }
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
index 7c9628c..15deb78 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
@@ -13,6 +13,7 @@ using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.MockRandom
 {
@@ -73,7 +74,7 @@ namespace Lucene.Net.Codecs.MockRandom
         private class MockInt32StreamFactory : Int32StreamFactory
         {
             private readonly int salt;
-            private readonly IList<Int32StreamFactory> delegates = new List<Int32StreamFactory>();
+            private readonly IList<Int32StreamFactory> delegates = new JCG.List<Int32StreamFactory>();
 
             public MockInt32StreamFactory(Random random)
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
index c5dc4f5..6e1598c 100644
--- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
@@ -188,7 +188,7 @@ namespace Lucene.Net.Codecs.RAMOnly
         {
             internal readonly string term;
             internal long totalTermFreq;
-            internal readonly IList<RAMDoc> docs = new List<RAMDoc>();
+            internal readonly IList<RAMDoc> docs = new JCG.List<RAMDoc>();
 
             public RAMTerm(string term)
             {
diff --git a/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs
index 40e7ffd..4292f26 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs
@@ -5,6 +5,7 @@ using Lucene.Net.Util;
 using Lucene.Net.Util.Packed;
 using System.Collections.Generic;
 using RandomizedTesting.Generators;
+using JCG = J2N.Collections.Generic;
 
 #if TESTFRAMEWORK_MSTEST
 using Test = Microsoft.VisualStudio.TestTools.UnitTesting.TestMethodAttribute;
@@ -65,7 +66,7 @@ namespace Lucene.Net.Index
             using Directory dir = new RAMDirectory();
             using IndexWriter iwriter = new IndexWriter(dir, iwc);
             int uniqueValueCount = TestUtil.NextInt32(Random, 1, 256);
-            IList<long> values = new List<long>();
+            IList<long> values = new JCG.List<long>();
 
             Document doc = new Document();
             NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
index e04602f..67dfac5 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
@@ -2419,7 +2419,7 @@ namespace Lucene.Net.Index
                     }
 
                     // add in any order to the dv field
-                    IList<string> unordered = new List<string>(values);
+                    IList<string> unordered = new JCG.List<string>(values);
                     unordered.Shuffle(Random);
                     foreach (string v in unordered)
                     {
@@ -2662,14 +2662,14 @@ namespace Lucene.Net.Index
                 }
                 int numValues = Random.Next(17);
                 // create a random list of strings
-                IList<string> values = new List<string>();
+                IList<string> values = new JCG.List<string>();
                 for (int v = 0; v < numValues; v++)
                 {
                     values.Add(TestUtil.RandomSimpleString(Random, length));
                 }
 
                 // add in any order to the indexed field
-                IList<string> unordered = new List<string>(values);
+                IList<string> unordered = new JCG.List<string>(values);
                 unordered.Shuffle(Random);
                 foreach (string v in unordered)
                 {
@@ -2677,7 +2677,7 @@ namespace Lucene.Net.Index
                 }
 
                 // add in any order to the dv field
-                IList<string> unordered2 = new List<string>(values);
+                IList<string> unordered2 = new JCG.List<string>(values);
                 unordered2.Shuffle(Random);
                 foreach (string v in unordered2)
                 {
@@ -3034,7 +3034,7 @@ namespace Lucene.Net.Index
                 {
                     numDocs = TestUtil.NextInt32(Random, 100, 200);
                 }
-                var docBytes = new List<byte[]>();
+                var docBytes = new JCG.List<byte[]>();
                 DirectoryReader r = null;
                 try
                 {
@@ -3178,7 +3178,7 @@ namespace Lucene.Net.Index
             {
                 numDocs = TestUtil.NextInt32(Random, 100, 200);
             }
-            var docBytes = new List<byte[]>();
+            var docBytes = new JCG.List<byte[]>();
             DirectoryReader r = null;
             try
             {
diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
index 5927d6e..e177ec8 100644
--- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
@@ -365,7 +365,7 @@ namespace Lucene.Net.Index
         }
 
 //#if TESTFRAMEWORK_MSTEST
-//        private static readonly IList<string> initalizationLock = new List<string>();
+//        private static readonly IList<string> initalizationLock = new JCG.List<string>();
 
 //        // LUCENENET TODO: Add support for attribute inheritance when it is released (2.0.0)
 //        //[Microsoft.VisualStudio.TestTools.UnitTesting.ClassInitialize(Microsoft.VisualStudio.TestTools.UnitTesting.InheritanceBehavior.BeforeEachDerivedClass)]
@@ -500,7 +500,7 @@ namespace Lucene.Net.Index
                 }
             }
 
-            allTerms = new List<FieldAndTerm>();
+            allTerms = new JCG.List<FieldAndTerm>();
             foreach (KeyValuePair<string, JCG.SortedDictionary<BytesRef, long>> fieldEnt in fields)
             {
                 string field = fieldEnt.Key;
@@ -1187,8 +1187,8 @@ namespace Lucene.Net.Index
             ThreadState threadState = new ThreadState();
 
             // Test random terms/fields:
-            IList<TermState> termStates = new List<TermState>();
-            IList<FieldAndTerm> termStateTerms = new List<FieldAndTerm>();
+            IList<TermState> termStates = new JCG.List<TermState>();
+            IList<FieldAndTerm> termStateTerms = new JCG.List<FieldAndTerm>();
 
             allTerms.Shuffle(Random);
             int upto = 0;
diff --git a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
index f827b40..307c354 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
@@ -88,7 +88,7 @@ namespace Lucene.Net.Index
             int docCount = AtLeast(200);
             int fieldCount = TestUtil.NextInt32(rand, 1, 5);
 
-            IList<int?> fieldIDs = new List<int?>();
+            IList<int?> fieldIDs = new JCG.List<int?>();
 
             FieldType customType = new FieldType(TextField.TYPE_STORED);
             customType.IsTokenized = false;
@@ -499,7 +499,7 @@ namespace Lucene.Net.Index
                 IndexSearcher searcher = new IndexSearcher(rd);
                 int concurrentReads = AtLeast(5);
                 int readsPerThread = AtLeast(50);
-                IList<ThreadJob> readThreads = new List<ThreadJob>();
+                IList<ThreadJob> readThreads = new JCG.List<ThreadJob>();
 
                 for (int i = 0; i < concurrentReads; ++i)
                 {
diff --git a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
index 9b1d663..50f14db 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
@@ -155,7 +155,7 @@ namespace Lucene.Net.Index
 
         protected virtual Options RandomOptions()
         {
-            return RandomPicks.RandomFrom(Random, new List<Options>(ValidOptions()));
+            return RandomPicks.RandomFrom(Random, new JCG.List<Options>(ValidOptions()));
         }
 
         protected virtual FieldType FieldType(Options options)
diff --git a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
index 2f92e96..9cb64ef 100644
--- a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
@@ -20,6 +20,7 @@ using System.Threading;
 using System.Threading.Tasks;
 using Console = Lucene.Net.Util.SystemConsole;
 using Directory = Lucene.Net.Store.Directory;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -174,8 +175,8 @@ namespace Lucene.Net.Index
             public override void Run()
             {
                 // TODO: would be better if this were cross thread, so that we make sure one thread deleting anothers added docs works:
-                IList<string> toDeleteIDs = new List<string>();
-                IList<SubDocs> toDeleteSubDocs = new List<SubDocs>();
+                IList<string> toDeleteIDs = new JCG.List<string>();
+                IList<SubDocs> toDeleteSubDocs = new JCG.List<SubDocs>();
                 while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime && !outerInstance.m_failed) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
                 {
                     try
@@ -245,9 +246,9 @@ namespace Lucene.Net.Index
                                 }
 
                                 Field packIDField = NewStringField("packID", packID, Field.Store.YES);
-                                IList<string> docIDs = new List<string>();
+                                IList<string> docIDs = new JCG.List<string>();
                                 SubDocs subDocs = new SubDocs(packID, docIDs);
-                                IList<Document> docsList = new List<Document>();
+                                IList<Document> docsList = new JCG.List<Document>();
 
                                 allSubDocs.Enqueue(subDocs);
                                 doc.Add(packIDField);
diff --git a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs
index 3dc951a..67ea723 100644
--- a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs
+++ b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs
@@ -1,9 +1,10 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using System.Runtime.CompilerServices;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search
 {
@@ -118,7 +119,7 @@ namespace Lucene.Net.Search
             // collectors (e.g. ToParentBlockJoinCollector) that
             // need to walk the scorer tree will miss/skip the
             // Scorer we wrap:
-            return new List<ChildScorer>() { new ChildScorer(@in, "SHOULD") };
+            return new JCG.List<ChildScorer>() { new ChildScorer(@in, "SHOULD") };
         }
 
         public override int Freq
diff --git a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs
index 2cf40a2..c26aca2 100644
--- a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs
+++ b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs
@@ -6,6 +6,7 @@ using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search
 {
@@ -47,7 +48,7 @@ namespace Lucene.Net.Search
             perFieldSeed = random.Next();
             coordType = random.Next(3);
             shouldQueryNorm = random.NextBoolean();
-            knownSims = new List<Similarity>(allSims);
+            knownSims = new JCG.List<Similarity>(allSims);
             knownSims.Shuffle(random);
         }
 
@@ -113,7 +114,7 @@ namespace Lucene.Net.Search
 
         private static IList<Similarity> LoadAllSims() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
         {
-            var allSims = new List<Similarity>();
+            var allSims = new JCG.List<Similarity>();
             allSims.Add(new DefaultSimilarity());
             allSims.Add(new BM25Similarity());
             foreach (BasicModel basicModel in BASIC_MODELS)
diff --git a/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs b/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
index c846fda..c185344 100644
--- a/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
+++ b/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
@@ -58,7 +58,7 @@ namespace Lucene.Net.Search
         protected static string m_stopword; // we always pick a character as a stopword
 
 //#if TESTFRAMEWORK_MSTEST
-//        private static readonly IList<string> initalizationLock = new List<string>();
+//        private static readonly IList<string> initalizationLock = new JCG.List<string>();
 
 //        // LUCENENET TODO: Add support for attribute inheritance when it is released (2.0.0)
 //        //[Microsoft.VisualStudio.TestTools.UnitTesting.ClassInitialize(Microsoft.VisualStudio.TestTools.UnitTesting.InheritanceBehavior.BeforeEachDerivedClass)]
diff --git a/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs b/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs
index a45097d..552fe88 100644
--- a/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs
@@ -1321,7 +1321,7 @@ namespace Lucene.Net.Store
         //{
         //    using (Directory dir = GetDirectory(CreateTempDir()))
         //    {
-        //        IList<string> names = new List<string>();
+        //        IList<string> names = new JCG.List<string>();
         //        int iters = AtLeast(50);
         //        for (int iter = 0; iter < iters; iter++)
         //        {
@@ -1343,7 +1343,7 @@ namespace Lucene.Net.Store
         //            .Where(file => !ExtraFS.IsExtra(file)) // remove any ExtrasFS stuff.
         //            .ToList();
 
-        //        assertEquals(new List<string>(names), files);
+        //        assertEquals(new JCG.List<string>(names), files);
         //    }
         //}
 
diff --git a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
index aef6c3b..ead37aa 100644
--- a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
+++ b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
@@ -1045,7 +1045,7 @@ namespace Lucene.Net.Store
 
                                 if (!Arrays.Equals(startFiles, endFiles))
                                 {
-                                    IList<string> removed = new List<string>();
+                                    IList<string> removed = new JCG.List<string>();
                                     foreach (string fileName in startFiles)
                                     {
                                         if (!endSet.Contains(fileName))
@@ -1054,7 +1054,7 @@ namespace Lucene.Net.Store
                                         }
                                     }
 
-                                    IList<string> added = new List<string>();
+                                    IList<string> added = new JCG.List<string>();
                                     foreach (string fileName in endFiles)
                                     {
                                         if (!startSet.Contains(fileName))
@@ -1166,7 +1166,7 @@ namespace Lucene.Net.Store
 
         // LUCENENET specific - de-nested Failure
 
-        internal List<Failure> failures;
+        internal JCG.List<Failure> failures;
 
         /// <summary>
         /// Add a <see cref="Failure"/> object to the list of objects to be evaluated
@@ -1179,7 +1179,7 @@ namespace Lucene.Net.Store
             {
                 if (failures == null)
                 {
-                    failures = new List<Failure>();
+                    failures = new JCG.List<Failure>();
                 }
                 failures.Add(fail);
             }
diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs
index 599913d..cc77b85 100644
--- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs
+++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs
@@ -9,6 +9,7 @@ using System.Linq;
 using System.Text;
 using System.Text.RegularExpressions;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net
 {
@@ -257,7 +258,7 @@ namespace Lucene.Net
             }
             else
             {
-                List<T> toRemove = new List<T>();
+                JCG.List<T> toRemove = new JCG.List<T>();
 
                 foreach (var item in s)
                 {
diff --git a/src/Lucene.Net.TestFramework/Support/SynchronizedList.cs b/src/Lucene.Net.TestFramework/Support/SynchronizedList.cs
index d7acccc..76d3a5f 100644
--- a/src/Lucene.Net.TestFramework/Support/SynchronizedList.cs
+++ b/src/Lucene.Net.TestFramework/Support/SynchronizedList.cs
@@ -1,6 +1,7 @@
-using System.Collections;
+using System.Collections;
 using System.Collections.Generic;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -23,7 +24,7 @@ namespace Lucene.Net.Index
 
     internal class SynchronizedList<T> : IList<T>
     {
-        private readonly List<T> _list = new List<T>();
+        private readonly JCG.List<T> _list = new JCG.List<T>();
 
         private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim();
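
The wrapper above guards a JCG.List<T> with a ReaderWriterLockSlim. As a rough, hypothetical sketch of that locking pattern (names and policy are illustrative, not copied from SynchronizedList<T>), reads take the read lock and mutations take the write lock:

    using System.Threading;
    using JCG = J2N.Collections.Generic;

    internal sealed class LockedList<T>
    {
        private readonly JCG.List<T> _items = new JCG.List<T>();
        private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim();

        public void Add(T item)
        {
            _lock.EnterWriteLock();
            try { _items.Add(item); }
            finally { _lock.ExitWriteLock(); }
        }

        public T this[int index]
        {
            get
            {
                _lock.EnterReadLock();
                try { return _items[index]; }
                finally { _lock.ExitReadLock(); }
            }
        }

        public int Count
        {
            get
            {
                _lock.EnterReadLock();
                try { return _items.Count; }
                finally { _lock.ExitReadLock(); }
            }
        }
    }
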
 
diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
index 812f984..4113e76 100644
--- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
+++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
@@ -452,7 +452,7 @@ namespace Lucene.Net.Util.Automaton
                     Transition t = s.TransitionsArray[i];
                     if (!allArriving.TryGetValue(t.to, out IList<ArrivingTransition> tl) || tl == null)
                     {
-                        tl = new List<ArrivingTransition>();
+                        tl = new JCG.List<ArrivingTransition>();
                         allArriving[t.to] = tl;
                     }
                     tl.Add(new ArrivingTransition(s, t));
@@ -487,7 +487,7 @@ namespace Lucene.Net.Util.Automaton
 
         public int[] GetRandomAcceptedString(Random r)
         {
-            List<int> soFar = new List<int>();
+            JCG.List<int> soFar = new JCG.List<int>();
             if (a.IsSingleton)
             {
                 // accepts only one
@@ -535,7 +535,7 @@ namespace Lucene.Net.Util.Automaton
                     {
                         // pick a transition that we know is the fastest
                         // path to an accept state
-                        IList<Transition> toAccept = new List<Transition>();
+                        IList<Transition> toAccept = new JCG.List<Transition>();
                         for (int i = 0; i < s.numTransitions; i++)
                         {
                             Transition t0 = s.TransitionsArray[i];
diff --git a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
index 1653463..edd90b3 100644
--- a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
+++ b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
@@ -266,7 +266,7 @@ namespace Lucene.Net.Util.Fst
         {
             FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>());
 
-            IList<FST.Arc<T>> arcs = new List<FST.Arc<T>>();
+            IList<FST.Arc<T>> arcs = new JCG.List<FST.Arc<T>>();
             @in.Length = 0;
             @in.Offset = 0;
             T NO_OUTPUT = fst.Outputs.NoOutput;
diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
index f803a3a..33f0b37 100644
--- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
@@ -756,7 +756,7 @@ namespace Lucene.Net.Util
 
         private static IList<string> LoadCoreDirectories()
         {
-            return new List<string>(FS_DIRECTORIES)
+            return new JCG.List<string>(FS_DIRECTORIES)
             {
                 "RAMDirectory"
             };
@@ -1036,7 +1036,7 @@ namespace Lucene.Net.Util
         }
 
 #if TESTFRAMEWORK_MSTEST
-        private static readonly IList<string> initalizationLock = new List<string>();
+        private static readonly IList<string> initalizationLock = new JCG.List<string>();
         private static string _testClassName = string.Empty;
         private static string _testName = string.Empty;
         private static Type _testClassType;
@@ -2979,7 +2979,7 @@ namespace Lucene.Net.Util
 
             rightEnum = rightTerms.GetEnumerator(rightEnum);
 
-            IList<BytesRef> shuffledTests = new List<BytesRef>(tests);
+            IList<BytesRef> shuffledTests = new JCG.List<BytesRef>(tests);
             shuffledTests.Shuffle(Random);
 
             foreach (BytesRef b in shuffledTests)
diff --git a/src/Lucene.Net.TestFramework/Util/RunListenerPrintReproduceInfo.cs b/src/Lucene.Net.TestFramework/Util/RunListenerPrintReproduceInfo.cs
index a74ba72..60fd9b6 100644
--- a/src/Lucene.Net.TestFramework/Util/RunListenerPrintReproduceInfo.cs
+++ b/src/Lucene.Net.TestFramework/Util/RunListenerPrintReproduceInfo.cs
@@ -1,4 +1,4 @@
-#if TESTFRAMEWORK
+#if TESTFRAMEWORK
 // LUCENENET NOTE: This is incomplete
 using System;
 using System.Collections.Generic;
@@ -49,7 +49,7 @@ namespace Lucene.Net.Util
       /// A list of all test suite classes executed so far in this JVM (ehm, 
       /// under this class's classloader).
       /// </summary>
-      private static IList<string> TestClassesRun = new List<string>();
+      private static IList<string> TestClassesRun = new JCG.List<string>();
 
       /// <summary>
       /// The currently executing scope.
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs b/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs
index 5aa27e6..3eaf337 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs
@@ -1,4 +1,4 @@
-#if TESTFRAMEWORK
+#if TESTFRAMEWORK
 // LUCENENET NOTE: This is incomplete
 using System;
 using System.Collections.Generic;
@@ -100,7 +100,7 @@ namespace Lucene.Net.Util
       /// </summary>
       private static IList<Exception> ExpandFromMultiple(Exception t)
       {
-        return ExpandFromMultiple(t, new List<Exception>());
+        return ExpandFromMultiple(t, new JCG.List<Exception>());
       }
 
       /// <summary>
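
The ExpandFromMultiple change above follows the familiar accumulator-overload shape: a public entry point allocates the result list and a private overload fills it recursively. A hypothetical sketch of that shape, using AggregateException as a stand-in for whatever "multiple failures" type the rule actually unwraps:

    using System;
    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class ExceptionExpansion
    {
        internal static IList<Exception> Expand(Exception t)
        {
            return Expand(t, new JCG.List<Exception>());
        }

        private static IList<Exception> Expand(Exception t, IList<Exception> results)
        {
            if (t is AggregateException agg)
            {
                // Recurse into each inner exception so the result is a flat list.
                foreach (Exception inner in agg.InnerExceptions)
                    Expand(inner, results);
            }
            else
            {
                results.Add(t);
            }
            return results;
        }
    }
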
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
index ea6dfe3..b6b6cea 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Util;
 using NUnit.Framework;
@@ -344,7 +344,7 @@ namespace Lucene.Net.Analysis.CharFilters
                     StringBuilder output = new StringBuilder();
 
                     // Maps output offset to input offset:
-                    IList<int?> inputOffsets = new List<int?>();
+                    IList<int?> inputOffsets = new JCG.List<int?>();
 
                     int cumDiff = 0;
                     int charIdx = 0;
@@ -446,7 +446,7 @@ namespace Lucene.Net.Analysis.CharFilters
 
                     MappingCharFilter mapFilter = new MappingCharFilter(charMap, new StringReader(content));
                     StringBuilder actualBuilder = new StringBuilder();
-                    IList<int?> actualInputOffsets = new List<int?>();
+                    IList<int?> actualInputOffsets = new JCG.List<int?>();
 
                     // Now consume the actual mapFilter, somewhat randomly:
                     while (true)
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
index acf341a..72d80e6 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
@@ -60,9 +60,9 @@ namespace Lucene.Net.Analysis.Core
     public class TestRandomChains : BaseTokenStreamTestCase
     {
 
-        internal static List<ConstructorInfo> tokenizers;
-        internal static List<ConstructorInfo> tokenfilters;
-        internal static List<ConstructorInfo> charfilters;
+        internal static IList<ConstructorInfo> tokenizers;
+        internal static IList<ConstructorInfo> tokenfilters;
+        internal static IList<ConstructorInfo> charfilters;
 
         private interface IPredicate<T>
         {
@@ -207,9 +207,9 @@ namespace Lucene.Net.Analysis.Core
                         && (typeInfo.IsSubclassOf(typeof(Tokenizer)) || typeInfo.IsSubclassOf(typeof(TokenFilter)) || typeInfo.IsSubclassOf(typeof(CharFilter)));
                 })
                 .ToArray();
-            tokenizers = new List<ConstructorInfo>();
-            tokenfilters = new List<ConstructorInfo>();
-            charfilters = new List<ConstructorInfo>();
+            tokenizers = new JCG.List<ConstructorInfo>();
+            tokenfilters = new JCG.List<ConstructorInfo>();
+            charfilters = new JCG.List<ConstructorInfo>();
             foreach (Type c in analysisClasses)
             {
                 foreach (ConstructorInfo ctor in c.GetConstructors())
@@ -447,7 +447,7 @@ namespace Lucene.Net.Analysis.Core
             public object Create(Random random)
             {
                 // CapitalizationFilter
-                ICollection<char[]> col = new List<char[]>();
+                ICollection<char[]> col = new JCG.List<char[]>();
                 int num = random.nextInt(5);
                 for (int i = 0; i < num; i++)
                 {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
index 2acebb7..f450fb5 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
@@ -1,13 +1,13 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Analysis.Util;
 using Lucene.Net.Util;
 using NUnit.Framework;
 using System;
-using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 using Version = Lucene.Net.Util.LuceneVersion;
 
 namespace Lucene.Net.Analysis.Core
@@ -60,7 +60,7 @@ namespace Lucene.Net.Analysis.Core
         public virtual void TestStopPositons()
         {
             StringBuilder sb = new StringBuilder();
-            List<string> a = new List<string>();
+            JCG.List<string> a = new JCG.List<string>();
             for (int i = 0; i < 20; i++)
             {
                 string w = English.Int32ToEnglish(i).Trim();
@@ -88,8 +88,8 @@ namespace Lucene.Net.Analysis.Core
 #pragma warning restore 612, 618
             DoTestStopPositons(stpf, false);
             // with increments, concatenating two stop filters
-            List<string> a0 = new List<string>();
-            List<string> a1 = new List<string>();
+            JCG.List<string> a0 = new JCG.List<string>();
+            JCG.List<string> a1 = new JCG.List<string>();
             for (int i = 0; i < a.Count; i++)
             {
                 if (i % 2 == 0)
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
index 8ec637c..bca6cdd 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.Standard;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Support;
@@ -9,6 +9,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Core
 {
@@ -310,7 +311,7 @@ namespace Lucene.Net.Analysis.Core
             string[] urls;
             try
             {
-                IList<string> urlList = new List<string>();
+                IList<string> urlList = new JCG.List<string>();
                 bufferedReader = new StreamReader(this.GetType().getResourceAsStream("LuceneResourcesWikiPageURLs.txt"), Encoding.UTF8);
                 string line;
                 while (null != (line = bufferedReader.ReadLine()))
@@ -365,7 +366,7 @@ namespace Lucene.Net.Analysis.Core
             string[] emails;
             try
             {
-                IList<string> emailList = new List<string>();
+                IList<string> emailList = new JCG.List<string>();
                 bufferedReader = new System.IO.StreamReader(this.GetType().getResourceAsStream("email.addresses.from.random.text.with.email.addresses.txt"), Encoding.UTF8);
                 string line;
                 while (null != (line = bufferedReader.ReadLine()))
@@ -429,7 +430,7 @@ namespace Lucene.Net.Analysis.Core
             string[] urls;
             try
             {
-                IList<string> urlList = new List<string>();
+                IList<string> urlList = new JCG.List<string>();
                 bufferedReader = new System.IO.StreamReader(this.GetType().getResourceAsStream("urls.from.random.text.with.urls.txt"), Encoding.UTF8);
                 string line;
                 while (null != (line = bufferedReader.ReadLine()))
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestASCIIFoldingFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestASCIIFoldingFilter.cs
index e08625f..2c2dc16 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestASCIIFoldingFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestASCIIFoldingFilter.cs
@@ -7,6 +7,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
@@ -189,8 +190,8 @@ namespace Lucene.Net.Analysis.Miscellaneous
             string[] foldings = new string[] { "À" + "Á" + "Â" + "Ã" + "Ä" + "Å" + "Ā" + "Ă" + "Ą" + "Ə" + "Ǎ" + "Ǟ" + "Ǡ" + "Ǻ" + "Ȁ" + "Ȃ" + "Ȧ" + "Ⱥ" + "ᴀ" + "Ḁ" + "Ạ" + "Ả" + "Ấ" + "Ầ" + "Ẩ" + "Ẫ" + "Ậ" + "Ắ" + "Ằ" + "Ẳ" + "Ẵ" + "Ặ" + "Ⓐ" + "A", "A", "à" + "á" + "â" + "ã" + "ä" + "å" + "ā" + "ă" + "ą" + "ǎ" + "ǟ" + "ǡ" + "ǻ" + "ȁ" + "ȃ" + "ȧ" + "ɐ" + "ə" + "ɚ" + "ᶏ" + "ḁ" + "ᶕ" + "ẚ" + "ạ" + "ả" + "ấ" + "ầ" + "ẩ" + "ẫ" + "ậ" + "ắ" + "ằ" + "ẳ" + "ẵ" + "ặ" + "ₐ" + "ₔ" + "ⓐ" + "ⱥ" + "Ɐ" [...]
 
             // Construct input text and expected output tokens
-            IList<string> expectedUnfoldedTokens = new List<string>();
-            IList<string> expectedFoldedTokens = new List<string>();
+            IList<string> expectedUnfoldedTokens = new JCG.List<string>();
+            IList<string> expectedFoldedTokens = new JCG.List<string>();
             StringBuilder inputText = new StringBuilder();
             for (int n = 0; n < foldings.Length; n += 2)
             {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestCapitalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestCapitalizationFilter.cs
index a7415aa..4254742 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestCapitalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestCapitalizationFilter.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.Core;
 using Lucene.Net.Analysis.Util;
 using NUnit.Framework;
@@ -6,6 +6,7 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
@@ -60,7 +61,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             AssertCapitalizesTo("McKinley", new string[] { "Mckinley" }, true, keep, true, null, 0, CapitalizationFilter.DEFAULT_MAX_WORD_COUNT, CapitalizationFilter.DEFAULT_MAX_TOKEN_LENGTH);
 
             // Now try some prefixes
-            IList<char[]> okPrefix = new List<char[]>();
+            IList<char[]> okPrefix = new JCG.List<char[]>();
             okPrefix.Add("McK".ToCharArray());
 
             AssertCapitalizesTo("McKinley", new string[] { "McKinley" }, true, keep, true, okPrefix, 0, CapitalizationFilter.DEFAULT_MAX_WORD_COUNT, CapitalizationFilter.DEFAULT_MAX_TOKEN_LENGTH);
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestStemmerOverrideFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestStemmerOverrideFilter.cs
index fd13a30..a5f900f 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestStemmerOverrideFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestStemmerOverrideFilter.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N;
 using J2N.Collections.Generic.Extensions;
 using J2N.Text;
@@ -10,6 +10,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Miscellaneous
 {
@@ -100,7 +101,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(Random.nextBoolean());
             IDictionary<string, string> entrySet = map;
             StringBuilder input = new StringBuilder();
-            IList<string> output = new List<string>();
+            IList<string> output = new JCG.List<string>();
             foreach (KeyValuePair<string, string> entry in entrySet)
             {
                 builder.Add(entry.Key, entry.Value);
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
index 871796b..ed4391d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.CharFilters;
 using Lucene.Net.Analysis.TokenAttributes;
 using NUnit.Framework;
@@ -7,6 +7,7 @@ using System.Globalization;
 using System.IO;
 using System.Text;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Pattern
 {
@@ -73,7 +74,7 @@ namespace Lucene.Net.Analysis.Pattern
             const string INPUT = "G&uuml;nther G&uuml;nther is here";
 
             // create MappingCharFilter
-            IList<string> mappingRules = new List<string>();
+            IList<string> mappingRules = new JCG.List<string>();
             mappingRules.Add("\"&uuml;\" => \"ü\"");
             NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
             builder.Add("&uuml;", "ü");
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestMultiWordSynonyms.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestMultiWordSynonyms.cs
index 5895ada..b95b633 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestMultiWordSynonyms.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestMultiWordSynonyms.cs
@@ -1,9 +1,10 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.Util;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -35,7 +36,7 @@ namespace Lucene.Net.Analysis.Synonym
         [Obsolete("Remove this test in 5.0")]
         public virtual void TestMultiWordSynonymsOld()
         {
-            IList<string> rules = new List<string>();
+            IList<string> rules = new JCG.List<string>();
             rules.Add("a b c,d");
             SlowSynonymMap synMap = new SlowSynonymMap(true);
             SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, null);
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSlowSynonymFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSlowSynonymFilter.cs
index e5010b2..fc3b52d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSlowSynonymFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSlowSynonymFilter.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using J2N.Collections.Generic.Extensions;
 using J2N.Text;
 using Lucene.Net.Analysis.TokenAttributes;
@@ -8,6 +8,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -278,7 +279,7 @@ namespace Lucene.Net.Analysis.Synonym
         private IList<Token> Tokens(string str)
         {
             string[] arr = str.Split(' ').TrimEnd();
-            IList<Token> result = new List<Token>();
+            IList<Token> result = new JCG.List<Token>();
             for (int i = 0; i < arr.Length; i++)
             {
                 string[] toks = arr[i].Split('/').TrimEnd();
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs
index 62065ad..24fc697 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs
@@ -7,6 +7,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Synonym
 {
@@ -36,7 +37,7 @@ namespace Lucene.Net.Analysis.Synonym
         public virtual void TestInvalidMappingRules()
         {
             SlowSynonymMap synMap = new SlowSynonymMap(true);
-            IList<string> rules = new List<string>(1);
+            IList<string> rules = new JCG.List<string>(1);
             rules.Add("a=>b=>c");
             try
             {
@@ -54,7 +55,7 @@ namespace Lucene.Net.Analysis.Synonym
             SlowSynonymMap synMap;
 
             // (a)->[b]
-            IList<string> rules = new List<string>();
+            IList<string> rules = new JCG.List<string>();
             rules.Add("a=>b");
             synMap = new SlowSynonymMap(true);
             SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, null);
@@ -134,7 +135,7 @@ namespace Lucene.Net.Analysis.Synonym
 
             // (a)->[a]
             // (b)->[a]
-            IList<string> rules = new List<string>();
+            IList<string> rules = new JCG.List<string>();
             rules.Add("a,b");
             synMap = new SlowSynonymMap(true);
             SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", false, null);
@@ -186,7 +187,7 @@ namespace Lucene.Net.Analysis.Synonym
 
             // (a)->[a][b]
             // (b)->[a][b]
-            IList<string> rules = new List<string>();
+            IList<string> rules = new JCG.List<string>();
             rules.Add("a,b");
             synMap = new SlowSynonymMap(true);
             SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, null);
@@ -262,7 +263,7 @@ namespace Lucene.Net.Analysis.Synonym
             TokenizerFactory tf = new NGramTokenizerFactory(args);
 
             // (ab)->(bc)->(cd)->[ef][fg][gh]
-            IList<string> rules = new List<string>();
+            IList<string> rules = new JCG.List<string>();
             rules.Add("abcd=>efgh");
             synMap = new SlowSynonymMap(true);
             SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, tf);
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
index 625055e..d95e3f8 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
@@ -449,7 +449,7 @@ namespace Lucene.Net.Analysis.Synonym
             //final int numSyn = 2;
 
             IDictionary<string, OneSyn> synMap = new Dictionary<string, OneSyn>();
-            IList<OneSyn> syns = new List<OneSyn>();
+            IList<OneSyn> syns = new JCG.List<OneSyn>();
             bool dedup = Random.nextBoolean();
             if (Verbose)
             {
@@ -464,7 +464,7 @@ namespace Lucene.Net.Analysis.Synonym
                     s = new OneSyn();
                     s.@in = synIn;
                     syns.Add(s);
-                    s.@out = new List<string>();
+                    s.@out = new JCG.List<string>();
                     synMap[synIn] = s;
                     s.keepOrig = Random.nextBoolean();
                 }
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Th/TestThaiAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Th/TestThaiAnalyzer.cs
index 3ec5ce9..78e064a 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Th/TestThaiAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Th/TestThaiAnalyzer.cs
@@ -12,6 +12,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Th
 {
@@ -253,10 +254,10 @@ namespace Lucene.Net.Analysis.Th
             IOffsetAttribute offsetAtt;
             IPositionIncrementAttribute posIncAtt;
 
-            List<string> tokens = new List<string>();
-            List<int> positions = new List<int>();
-            List<int> startOffsets = new List<int>();
-            List<int> endOffsets = new List<int>();
+            JCG.List<string> tokens = new JCG.List<string>();
+            JCG.List<int> positions = new JCG.List<int>();
+            JCG.List<int> startOffsets = new JCG.List<int>();
+            JCG.List<int> endOffsets = new JCG.List<int>();
 
             TokenStream ts;
             TextReader reader = new StringReader(text);
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
index 1147fad..8492e10 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
@@ -418,7 +418,7 @@ namespace Lucene.Net.Analysis.Util
             CharArraySet setCaseSensitive = new CharArraySet(TEST_VERSION_CURRENT, 10, false);
 
             IList<string> stopwords = TEST_STOP_WORDS;
-            IList<string> stopwordsUpper = new List<string>();
+            IList<string> stopwordsUpper = new JCG.List<string>();
             foreach (string @string in stopwords)
             {
                 stopwordsUpper.Add(@string.ToUpperInvariant());
@@ -442,7 +442,7 @@ namespace Lucene.Net.Analysis.Util
                 assertFalse(copyCaseSens.contains(@string));
             }
             // test adding terms to the copy
-            IList<string> newWords = new List<string>();
+            IList<string> newWords = new JCG.List<string>();
             foreach (string @string in stopwords)
             {
                 newWords.Add(@string + "_1");
@@ -471,7 +471,7 @@ namespace Lucene.Net.Analysis.Util
             CharArraySet setCaseSensitive = new CharArraySet(TEST_VERSION_CURRENT, 10, false);
 
             IList<string> stopwords = TEST_STOP_WORDS;
-            IList<string> stopwordsUpper = new List<string>();
+            IList<string> stopwordsUpper = new JCG.List<string>();
             foreach (string @string in stopwords)
             {
                 stopwordsUpper.Add(@string.ToUpperInvariant());
@@ -495,7 +495,7 @@ namespace Lucene.Net.Analysis.Util
                 assertFalse(copyCaseSens.contains(@string));
             }
             // test adding terms to the copy
-            IList<string> newWords = new List<string>();
+            IList<string> newWords = new JCG.List<string>();
             foreach (string @string in stopwords)
             {
                 newWords.Add(@string + "_1");
@@ -522,7 +522,7 @@ namespace Lucene.Net.Analysis.Util
             ISet<string> set = new JCG.HashSet<string>();
 
             IList<string> stopwords = TEST_STOP_WORDS;
-            IList<string> stopwordsUpper = new List<string>();
+            IList<string> stopwordsUpper = new JCG.List<string>();
             foreach (string @string in stopwords)
             {
                 stopwordsUpper.Add(@string.ToUpperInvariant());
@@ -540,7 +540,7 @@ namespace Lucene.Net.Analysis.Util
                 assertFalse(copy.contains(@string));
             }
 
-            IList<string> newWords = new List<string>();
+            IList<string> newWords = new JCG.List<string>();
             foreach (string @string in stopwords)
             {
                 newWords.Add(@string + "_1");
@@ -649,7 +649,7 @@ namespace Lucene.Net.Analysis.Util
         [Test, LuceneNetSpecific]
         public virtual void TestEquality()
         {
-            var values = new List<string> { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
+            var values = new JCG.List<string> { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             var charArraySet = new CharArraySet(TEST_VERSION_CURRENT, values, false);
             var charArraySetCopy = new CharArraySet(TEST_VERSION_CURRENT, values, false);
             values.Reverse();
@@ -663,7 +663,7 @@ namespace Lucene.Net.Analysis.Util
             assertTrue(charArraySet.GetHashCode().Equals(equatableSetReverse.GetHashCode()));
             assertTrue(charArraySet.Equals(equatableSetReverse));
 
-            values = new List<string> { "sally", "seashells", "by", "the", "sea", "shore" };
+            values = new JCG.List<string> { "sally", "seashells", "by", "the", "sea", "shore" };
             charArraySet.Clear();
             charArraySet.UnionWith(values);
 
@@ -684,8 +684,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var existingValuesAsObject = new List<object> { "seashells", "sea", "shore" };
-            var mixedExistingNonExistingValuesAsObject = new List<object> { "true", "set", "of", "unique", "values", "except", "sells" };
+            var existingValuesAsObject = new JCG.List<object> { "seashells", "sea", "shore" };
+            var mixedExistingNonExistingValuesAsObject = new JCG.List<object> { "true", "set", "of", "unique", "values", "except", "sells" };
             var nonExistingMixedTypes = new object[] { true, (byte)55, (short)44, (int)33, (sbyte)22, (long)11, (char)'\n', "hurray", (uint)99, (ulong)89, (ushort)79, new char[] { 't', 'w', 'o' }, new StringCharSequence("testing") };
 
             // Add existing values
@@ -727,8 +727,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var existingValues = new List<char[]> { "seashells".ToCharArray(), "sea".ToCharArray(), "shore".ToCharArray() };
-            var mixedExistingNonExistingValues = new List<char[]> { "true".ToCharArray(), "set".ToCharArray(), "of".ToCharArray(), "unique".ToCharArray(), "values".ToCharArray(), "except".ToCharArray(), "sells".ToCharArray() };
+            var existingValues = new JCG.List<char[]> { "seashells".ToCharArray(), "sea".ToCharArray(), "shore".ToCharArray() };
+            var mixedExistingNonExistingValues = new JCG.List<char[]> { "true".ToCharArray(), "set".ToCharArray(), "of".ToCharArray(), "unique".ToCharArray(), "values".ToCharArray(), "except".ToCharArray(), "sells".ToCharArray() };
 
             // Add existing values
             assertFalse(target.UnionWith(existingValues));
@@ -747,8 +747,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var existingValues = new List<string> { "seashells", "sea", "shore" };
-            var mixedExistingNonExistingValues = new List<string> { "true", "set", "of", "unique", "values", "except", "sells" };
+            var existingValues = new JCG.List<string> { "seashells", "sea", "shore" };
+            var mixedExistingNonExistingValues = new JCG.List<string> { "true", "set", "of", "unique", "values", "except", "sells" };
 
             // Add existing values
             //assertFalse(target.UnionWith(existingValues));
@@ -769,8 +769,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var existingValues = new List<ICharSequence> { new StringCharSequence("seashells"), new StringCharSequence("sea"), new StringCharSequence("shore") };
-            var mixedExistingNonExistingValues = new List<ICharSequence> { new StringCharSequence("true"), new StringCharSequence("set"), new StringCharSequence("of"), new StringCharSequence("unique"), new StringCharSequence("values"), new StringCharSequence("except"), new StringCharSequence("sells") };
+            var existingValues = new JCG.List<ICharSequence> { new StringCharSequence("seashells"), new StringCharSequence("sea"), new StringCharSequence("shore") };
+            var mixedExistingNonExistingValues = new JCG.List<ICharSequence> { new StringCharSequence("true"), new StringCharSequence("set"), new StringCharSequence("of"), new StringCharSequence("unique"), new StringCharSequence("values"), new StringCharSequence("except"), new StringCharSequence("sells") };
 
             // Add existing values
             assertFalse(target.UnionWith(existingValues));
@@ -789,8 +789,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<string> { "seashells", "sea", "shore" };
-            var superset = new List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<string> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertFalse(target.IsSubsetOf(subset));
             assertTrue(target.IsSubsetOf(superset));
@@ -802,8 +802,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<object> { "seashells", "sea", "shore" };
-            var superset = new List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<object> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertFalse(target.IsSubsetOf(subset));
             assertTrue(target.IsSubsetOf(superset));
@@ -815,8 +815,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<string> { "seashells", "sea", "shore" };
-            var superset = new List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<string> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertFalse(target.IsProperSubsetOf(subset));
             assertTrue(target.IsProperSubsetOf(superset));
@@ -828,8 +828,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<object> { "seashells", "sea", "shore" };
-            var superset = new List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<object> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertFalse(target.IsProperSubsetOf(subset));
             assertTrue(target.IsProperSubsetOf(superset));
@@ -841,8 +841,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<string> { "seashells", "sea", "shore" };
-            var superset = new List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<string> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertTrue(target.IsSupersetOf(subset));
             assertFalse(target.IsSupersetOf(superset));
@@ -854,8 +854,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<object> { "seashells", "sea", "shore" };
-            var superset = new List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<object> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertTrue(target.IsSupersetOf(subset));
             assertFalse(target.IsSupersetOf(superset));
@@ -867,8 +867,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<string> { "seashells", "sea", "shore" };
-            var superset = new List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<string> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertTrue(target.IsProperSupersetOf(subset));
             assertFalse(target.IsProperSupersetOf(superset));
@@ -880,8 +880,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var subset = new List<object> { "seashells", "sea", "shore" };
-            var superset = new List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var subset = new JCG.List<object> { "seashells", "sea", "shore" };
+            var superset = new JCG.List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertTrue(target.IsProperSupersetOf(subset));
             assertFalse(target.IsProperSupersetOf(superset));
@@ -893,8 +893,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var nonOverlapping = new List<string> { "peter", "piper", "picks", "a", "pack", "of", "pickled", "peppers" };
-            var overlapping = new List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var nonOverlapping = new JCG.List<string> { "peter", "piper", "picks", "a", "pack", "of", "pickled", "peppers" };
+            var overlapping = new JCG.List<string> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertFalse(target.Overlaps(nonOverlapping));
             assertTrue(target.Overlaps(overlapping));
@@ -906,8 +906,8 @@ namespace Lucene.Net.Analysis.Util
         {
             var originalValues = new string[] { "sally", "sells", "seashells", "by", "the", "sea", "shore" };
             CharArraySet target = new CharArraySet(TEST_VERSION_CURRENT, originalValues, false);
-            var nonOverlapping = new List<object> { "peter", "piper", "picks", "a", "pack", "of", "pickled", "peppers" };
-            var overlapping = new List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
+            var nonOverlapping = new JCG.List<object> { "peter", "piper", "picks", "a", "pack", "of", "pickled", "peppers" };
+            var overlapping = new JCG.List<object> { "introducing", "sally", "sells", "seashells", "by", "the", "sea", "shore", "and", "more" };
 
             assertFalse(target.Overlaps(nonOverlapping));
             assertTrue(target.Overlaps(overlapping));
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
index 39fbf5a..731c355 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestElision.cs
@@ -1,4 +1,4 @@
-// Lucene version compatibility level 4.8.1
+// Lucene version compatibility level 4.8.1
 using Lucene.Net.Analysis.Core;
 using Lucene.Net.Analysis.Fr;
 using Lucene.Net.Analysis.Standard;
@@ -6,6 +6,7 @@ using Lucene.Net.Analysis.TokenAttributes;
 using NUnit.Framework;
 using System.Collections.Generic;
 using System.IO;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Util
 {
@@ -44,7 +45,7 @@ namespace Lucene.Net.Analysis.Util
 
         private IList<string> Filter(TokenFilter filter)
         {
-            IList<string> tas = new List<string>();
+            IList<string> tas = new JCG.List<string>();
             ICharTermAttribute termAtt = filter.GetAttribute<ICharTermAttribute>();
             filter.Reset();
             while (filter.IncrementToken())
diff --git a/src/Lucene.Net.Tests.Analysis.Kuromoji/Tools/TestBuildDictionary.cs b/src/Lucene.Net.Tests.Analysis.Kuromoji/Tools/TestBuildDictionary.cs
index e18dec3..3f31370 100644
--- a/src/Lucene.Net.Tests.Analysis.Kuromoji/Tools/TestBuildDictionary.cs
+++ b/src/Lucene.Net.Tests.Analysis.Kuromoji/Tools/TestBuildDictionary.cs
@@ -3,7 +3,7 @@ using Lucene.Net.Analysis.Ja.Util;
 using Lucene.Net.Attributes;
 using Lucene.Net.Util;
 using NUnit.Framework;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Ja.Tools
 {
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.Ja.Tools
                 TestUtil.Unzip(zipFileStream, inputDir);
             }
 
-            var args = new List<string>();
+            var args = new JCG.List<string>();
             args.Add("ipadic"); // dictionary format
             args.Add(inputDir.FullName); // input dir
             args.Add(outputDir.FullName); // output dir
diff --git a/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/PhoneticEngineTest.cs b/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/PhoneticEngineTest.cs
index 7017df2..3f94170 100644
--- a/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/PhoneticEngineTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/PhoneticEngineTest.cs
@@ -1,9 +1,9 @@
 using J2N.Text;
 using NUnit.Framework;
 using System;
-using System.Collections.Generic;
 using System.Text.RegularExpressions;
 using Assert = Lucene.Net.TestFramework.Assert;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis.Phonetic.Language.Bm
 {
@@ -28,7 +28,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
     {
         private static readonly int TEN = 10;
 
-        public static List<Object[]> Values = new List<object[]> { new Object[] { "Renault", "rinD|rinDlt|rina|rinalt|rino|rinolt|rinu|rinult", NameType.GENERIC, RuleType.APPROX, true, TEN },
+        public static JCG.List<Object[]> Values = new JCG.List<object[]> { new Object[] { "Renault", "rinD|rinDlt|rina|rinalt|rino|rinolt|rinu|rinult", NameType.GENERIC, RuleType.APPROX, true, TEN },
                             new Object[] { "Renault", "rYnDlt|rYnalt|rYnult|rinDlt|rinalt|rinult", NameType.ASHKENAZI, RuleType.APPROX, true, TEN },
                             new Object[] { "Renault", "rYnDlt", NameType.ASHKENAZI, RuleType.APPROX, true, 1 },
                             new Object[] { "Renault", "rinDlt", NameType.SEPHARDIC, RuleType.APPROX, true, TEN },
diff --git a/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs b/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs
index d64d1a6..c2d1735 100644
--- a/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs
+++ b/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs
@@ -1,6 +1,7 @@
 // Lucene version compatibility level 4.8.1
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
@@ -39,7 +40,7 @@ namespace Lucene.Net.Facet
         public virtual void SetScorer(Scorer scorer)
         {
             // Gathers all scorers, including value and "under":
-            allScorers = new List<Scorer>();
+            allScorers = new JCG.List<Scorer>();
             allScorers.Add(scorer);
             int upto = 0;
             while (upto < allScorers.Count)
diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
index 4da783e..871a626 100644
--- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
+++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
@@ -5,6 +5,7 @@ using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -101,7 +102,7 @@ namespace Lucene.Net.Facet
 
         protected internal virtual IList<TestDoc> GetRandomDocs(string[] tokens, int count, int numDims)
         {
-            IList<TestDoc> docs = new List<TestDoc>();
+            IList<TestDoc> docs = new JCG.List<TestDoc>();
             for (int i = 0; i < count; i++)
             {
                 TestDoc doc = new TestDoc();
@@ -170,7 +171,7 @@ namespace Lucene.Net.Facet
             }
         }
         
-        protected internal virtual void SortLabelValues(List<LabelAndValue> labelValues)
+        protected internal virtual void SortLabelValues(JCG.List<LabelAndValue> labelValues)
         {
             labelValues.Sort(Comparer<LabelAndValue>.Create((a,b) => {
                 if ((double)a.Value > (double)b.Value)
@@ -189,7 +190,7 @@ namespace Lucene.Net.Facet
         }
 
        
-        protected internal virtual void SortFacetResults(List<FacetResult> results)
+        protected internal virtual void SortFacetResults(JCG.List<FacetResult> results)
         {
             results.Sort(Comparer<FacetResult>.Create((a, b) =>
             {
diff --git a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
index 6d8bf87..1e546ee 100644
--- a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
+++ b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
@@ -3,6 +3,7 @@ using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -353,7 +354,7 @@ namespace Lucene.Net.Facet.SortedSet
                 Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
 
                 // Slow, yet hopefully bug-free, faceting:
-                var expectedCounts = new List<Dictionary<string, int?>>();
+                var expectedCounts = new JCG.List<Dictionary<string, int?>>();
                 for (int i = 0; i < numDims; i++)
                 {
                     expectedCounts.Add(new Dictionary<string, int?>());
@@ -380,10 +381,10 @@ namespace Lucene.Net.Facet.SortedSet
                     }
                 }
 
-                List<FacetResult> expected = new List<FacetResult>();
+                JCG.List<FacetResult> expected = new JCG.List<FacetResult>();
                 for (int i = 0; i < numDims; i++)
                 {
-                    List<LabelAndValue> labelValues = new List<LabelAndValue>();
+                    JCG.List<LabelAndValue> labelValues = new JCG.List<LabelAndValue>();
                     int totCount = 0;
                     foreach (KeyValuePair<string, int?> ent in expectedCounts[i])
                     {
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
index f608ee9..041e42b 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
@@ -71,7 +71,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 try
                 {
                     var seen = new JCG.HashSet<string>();
-                    IList<string> paths = new List<string>();
+                    IList<string> paths = new JCG.List<string>();
                     while (true)
                     {
                         Document doc = new Document();
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index 8504348..87cfab2 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -2,14 +2,13 @@
 using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Support;
-using Lucene.Net.Support.Threading;
 using NUnit.Framework;
 using System;
-using System.Collections.Generic;
 using System.Globalization;
 using System.Text;
 using System.Threading;
 using Assert = Lucene.Net.TestFramework.Assert;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
@@ -593,7 +592,7 @@ namespace Lucene.Net.Facet.Taxonomy
             {
                 // find expected children by looking at all expectedCategories
                 // for children
-                List<int?> expectedChildren = new List<int?>();
+                JCG.List<int?> expectedChildren = new JCG.List<int?>();
                 for (int j = ExpectedCategories.Length - 1; j >= 0; j--)
                 {
                     if (ExpectedCategories[j].Length != ExpectedCategories[i].Length + 1)
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index fb34904..59b41e6 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -833,7 +833,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 Facets facets = GetTaxonomyFacetCounts(tr, config, fc);
 
                 // Slow, yet hopefully bug-free, faceting:
-                var expectedCounts = new List<Dictionary<string, int?>>();
+                var expectedCounts = new JCG.List<Dictionary<string, int?>>();
                 for (int i = 0; i < numDims; i++)
                 {
                     expectedCounts.Add(new Dictionary<string, int?>());
@@ -860,10 +860,10 @@ namespace Lucene.Net.Facet.Taxonomy
                     }
                 }
 
-                List<FacetResult> expected = new List<FacetResult>();
+                JCG.List<FacetResult> expected = new JCG.List<FacetResult>();
                 for (int i = 0; i < numDims; i++)
                 {
-                    List<LabelAndValue> labelValues = new List<LabelAndValue>();
+                    JCG.List<LabelAndValue> labelValues = new JCG.List<LabelAndValue>();
                     int totCount = 0;
                     foreach (KeyValuePair<string, int?> ent in expectedCounts[i])
                     {
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
index fcf6f40..035c20c 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
@@ -107,7 +107,7 @@ namespace Lucene.Net.Facet.Taxonomy
             categories_a.Shuffle(Random);
             categories_b.Shuffle(Random);
 
-            List<FacetField> categories = new List<FacetField>();
+            JCG.List<FacetField> categories = new JCG.List<FacetField>();
             categories.AddRange(categories_a.GetView(0, numFacetsA)); // LUCENENET: Checked length for correctness
             categories.AddRange(categories_b.GetView(0, numFacetsB)); // LUCENENET: Checked length for correctness
 
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
index e29cbb7..c7b4bb6 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
@@ -8,6 +8,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using System.Runtime.CompilerServices;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -554,7 +555,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 Facets facets = new TaxonomyFacetSumValueSource(tr, config, fc, values);
 
                 // Slow, yet hopefully bug-free, faceting:
-                var expectedValues = new List<Dictionary<string, float?>>(numDims);
+                var expectedValues = new JCG.List<Dictionary<string, float?>>(numDims);
                 for (int i = 0; i < numDims; i++)
                 {
                     expectedValues.Add(new Dictionary<string, float?>());
@@ -581,10 +582,10 @@ namespace Lucene.Net.Facet.Taxonomy
                     }
                 }
 
-                List<FacetResult> expected = new List<FacetResult>();
+                JCG.List<FacetResult> expected = new JCG.List<FacetResult>();
                 for (int i = 0; i < numDims; i++)
                 {
-                    List<LabelAndValue> labelValues = new List<LabelAndValue>();
+                    JCG.List<LabelAndValue> labelValues = new JCG.List<LabelAndValue>();
                     float totValue = 0;
                     foreach (KeyValuePair<string, float?> ent in expectedValues[i])
                     {
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index 44478ff..f82ef09 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -492,7 +492,7 @@ namespace Lucene.Net.Facet
                 valueCount *= 2;
             }
 
-            IList<Doc> docs = new List<Doc>();
+            IList<Doc> docs = new JCG.List<Doc>();
             for (int i = 0; i < numDocs; i++)
             {
                 Doc doc = new Doc();
@@ -1051,7 +1051,7 @@ namespace Lucene.Net.Facet
         {
             int numDims = dimValues.Length;
 
-            List<Doc> hits = new List<Doc>();
+            JCG.List<Doc> hits = new JCG.List<Doc>();
             Counters drillDownCounts = new Counters(dimValues);
             Counters[] drillSidewaysCounts = new Counters[dimValues.Length];
             for (int dim = 0; dim < numDims; dim++)
diff --git a/src/Lucene.Net.Tests.Grouping/AllGroupHeadsCollectorTest.cs b/src/Lucene.Net.Tests.Grouping/AllGroupHeadsCollectorTest.cs
index b4fe1c0..d3ded8b 100644
--- a/src/Lucene.Net.Tests.Grouping/AllGroupHeadsCollectorTest.cs
+++ b/src/Lucene.Net.Tests.Grouping/AllGroupHeadsCollectorTest.cs
@@ -178,7 +178,7 @@ namespace Lucene.Net.Search.Grouping
                     Console.WriteLine("TEST: numDocs=" + numDocs + " numGroups=" + numGroups);
                 }
 
-                List<BytesRef> groups = new List<BytesRef>();
+                JCG.List<BytesRef> groups = new JCG.List<BytesRef>();
                 for (int i = 0; i < numGroups; i++)
                 {
                     string randomValue;
@@ -480,7 +480,7 @@ namespace Lucene.Net.Search.Grouping
 
         private int[] CreateExpectedGroupHeads(string searchTerm, GroupDoc[] groupDocs, Sort docSort, bool sortByScoreOnly, int[] fieldIdToDocID)
         {
-            IDictionary<BytesRef, List<GroupDoc>> groupHeads = new JCG.Dictionary<BytesRef, List<GroupDoc>>();
+            IDictionary<BytesRef, JCG.List<GroupDoc>> groupHeads = new JCG.Dictionary<BytesRef, JCG.List<GroupDoc>>();
             foreach (GroupDoc groupDoc in groupDocs)
             {
                 if (!groupDoc.content.StartsWith(searchTerm, StringComparison.Ordinal))
@@ -488,9 +488,9 @@ namespace Lucene.Net.Search.Grouping
                     continue;
                 }
 
-                if (!groupHeads.TryGetValue(groupDoc.group, out List<GroupDoc> grouphead))
+                if (!groupHeads.TryGetValue(groupDoc.group, out JCG.List<GroupDoc> grouphead))
                 {
-                    List<GroupDoc> list = new List<GroupDoc>();
+                    JCG.List<GroupDoc> list = new JCG.List<GroupDoc>();
                     list.Add(groupDoc);
                     groupHeads[groupDoc.group] = list;
                     continue;
@@ -502,7 +502,7 @@ namespace Lucene.Net.Search.Grouping
             int i = 0;
             foreach (BytesRef groupValue in groupHeads.Keys)
             {
-                List<GroupDoc> docs = groupHeads[groupValue];
+                JCG.List<GroupDoc> docs = groupHeads[groupValue];
                 // LUCENENET TODO: The original API Collections.Sort does not currently exist.
                 // This call ultimately results in calling TimSort, which is why this line was replaced
                 // with CollectionUtil.TimSort(IList<T>, IComparer<T>).
@@ -520,7 +520,7 @@ namespace Lucene.Net.Search.Grouping
 
         private Sort GetRandomSort(bool scoreOnly)
         {
-            List<SortField> sortFields = new List<SortField>();
+            JCG.List<SortField> sortFields = new JCG.List<SortField>();
             if (Random.nextInt(7) == 2 || scoreOnly)
             {
                 sortFields.Add(SortField.FIELD_SCORE);
diff --git a/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs b/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs
index 3ad43f2..cf5213e 100644
--- a/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs
+++ b/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs
@@ -146,31 +146,31 @@ namespace Lucene.Net.Search.Grouping
                 = CreateDistinctCountCollector(firstCollector, groupField, countField, dvType);
             indexSearcher.Search(new TermQuery(new Term("content", "random")), distinctValuesCollector);
 
-            //var gcs = distinctValuesCollector.Groups as List<IGroupCount<IComparable>>;
+            //var gcs = distinctValuesCollector.Groups as JCG.List<IGroupCount<IComparable>>;
             // LUCENENET TODO: Try to work out how to do this without an O(n) operation
-            var gcs = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
+            var gcs = new JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
             gcs.Sort(cmp);
             assertEquals(4, gcs.Count);
 
             CompareNull(gcs[0].GroupValue);
-            List<IComparable> countValues = new List<IComparable>(gcs[0].UniqueValues);
+            JCG.List<IComparable> countValues = new JCG.List<IComparable>(gcs[0].UniqueValues);
             assertEquals(1, countValues.size());
             Compare("1", countValues[0]);
 
             Compare("1", gcs[1].GroupValue);
-            countValues = new List<IComparable>(gcs[1].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[1].UniqueValues);
             countValues.Sort(nullComparer);
             assertEquals(2, countValues.size());
             Compare("1", countValues[0]);
             Compare("2", countValues[1]);
 
             Compare("2", gcs[2].GroupValue);
-            countValues = new List<IComparable>(gcs[2].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[2].UniqueValues);
             assertEquals(1, countValues.size());
             CompareNull(countValues[0]);
 
             Compare("3", gcs[3].GroupValue);
-            countValues = new List<IComparable>(gcs[3].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[3].UniqueValues);
             assertEquals(1, countValues.size());
             Compare("1", countValues[0]);
 
@@ -181,25 +181,25 @@ namespace Lucene.Net.Search.Grouping
             indexSearcher.Search(new TermQuery(new Term("content", "some")), distinctValuesCollector);
 
             // LUCENENET TODO: Try to work out how to do this without an O(n) operation
-            //gcs = distinctValuesCollector.Groups as List<IGroupCount<IComparable>>;
-            gcs = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
+            //gcs = distinctValuesCollector.Groups as JCG.List<IGroupCount<IComparable>>;
+            gcs = new JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
             gcs.Sort(cmp);
             assertEquals(3, gcs.Count);
 
             Compare("1", gcs[0].GroupValue);
-            countValues = new List<IComparable>(gcs[0].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[0].UniqueValues);
             assertEquals(2, countValues.size());
             countValues.Sort(nullComparer);
             Compare("1", countValues[0]);
             Compare("2", countValues[1]);
 
             Compare("2", gcs[1].GroupValue);
-            countValues = new List<IComparable>(gcs[1].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[1].UniqueValues);
             assertEquals(1, countValues.size());
             CompareNull(countValues[0]);
 
             Compare("3", gcs[2].GroupValue);
-            countValues = new List<IComparable>(gcs[2].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[2].UniqueValues);
             assertEquals(1, countValues.size());
             Compare("1", countValues[0]);
 
@@ -210,19 +210,19 @@ namespace Lucene.Net.Search.Grouping
             indexSearcher.Search(new TermQuery(new Term("content", "blob")), distinctValuesCollector);
 
             // LUCENENET TODO: Try to work out how to do this without an O(n) operation
-            //gcs = distinctValuesCollector.Groups as List<IGroupCount<IComparable>>;
-            gcs = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
+            //gcs = distinctValuesCollector.Groups as JCG.List<IGroupCount<IComparable>>;
+            gcs = new JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
             gcs.Sort(cmp);
             assertEquals(2, gcs.Count);
 
             Compare("1", gcs[0].GroupValue);
-            countValues = new List<IComparable>(gcs[0].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[0].UniqueValues);
             // B/c the only one document matched with blob inside the author 1 group
             assertEquals(1, countValues.Count);
             Compare("1", countValues[0]);
 
             Compare("3", gcs[1].GroupValue);
-            countValues = new List<IComparable>(gcs[1].UniqueValues);
+            countValues = new JCG.List<IComparable>(gcs[1].UniqueValues);
             assertEquals(1, countValues.Count);
             Compare("1", countValues[0]);
 
@@ -247,7 +247,7 @@ namespace Lucene.Net.Search.Grouping
                     Sort groupSort = new Sort(new SortField("id", SortFieldType.STRING));
                     int topN = 1 + random.nextInt(10);
 
-                    List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> expectedResult = CreateExpectedResult(context, term, groupSort, topN);
+                    IList<AbstractDistinctValuesCollector.IGroupCount<IComparable>> expectedResult = CreateExpectedResult(context, term, groupSort, topN);
 
                     IAbstractFirstPassGroupingCollector<IComparable> firstCollector = CreateRandomFirstPassCollector(dvType, groupSort, groupField, topN);
                     searcher.Search(new TermQuery(new Term("content", term)), firstCollector);
@@ -256,7 +256,7 @@ namespace Lucene.Net.Search.Grouping
                     searcher.Search(new TermQuery(new Term("content", term)), distinctValuesCollector);
 
                     // LUCENENET TODO: Try to work out how to do this without an O(n) operation
-                    List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> actualResult = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
+                    JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> actualResult = new JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>(distinctValuesCollector.Groups);
 
                     if (Verbose)
                     {
@@ -281,9 +281,9 @@ namespace Lucene.Net.Search.Grouping
                         AbstractDistinctValuesCollector.IGroupCount<IComparable> actual = actualResult[i];
                         AssertValues(expected.GroupValue, actual.GroupValue);
                         assertEquals(expected.UniqueValues.Count(), actual.UniqueValues.Count());
-                        List<IComparable> expectedUniqueValues = new List<IComparable>(expected.UniqueValues);
+                        JCG.List<IComparable> expectedUniqueValues = new JCG.List<IComparable>(expected.UniqueValues);
                         expectedUniqueValues.Sort(nullComparer);
-                        List<IComparable> actualUniqueValues = new List<IComparable>(actual.UniqueValues);
+                        JCG.List<IComparable> actualUniqueValues = new JCG.List<IComparable>(actual.UniqueValues);
                         actualUniqueValues.Sort(nullComparer);
                         for (int j = 0; j < expectedUniqueValues.size(); j++)
                         {
@@ -296,7 +296,7 @@ namespace Lucene.Net.Search.Grouping
             }
         }
 
-        private void PrintGroups(List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> results)
+        private void PrintGroups(IList<AbstractDistinctValuesCollector.IGroupCount<IComparable>> results)
         {
             for (int i = 0; i < results.size(); i++)
             {
@@ -474,9 +474,9 @@ namespace Lucene.Net.Search.Grouping
             }
         }
 
-        private List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> CreateExpectedResult(IndexContext context, string term, Sort groupSort, int topN)
+        private IList<AbstractDistinctValuesCollector.IGroupCount<IComparable>> CreateExpectedResult(IndexContext context, string term, Sort groupSort, int topN)
         {
-            List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> result = new List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>();
+            JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>> result = new JCG.List<AbstractDistinctValuesCollector.IGroupCount<IComparable>>();
             IDictionary<string, ISet<string>> groupCounts = context.searchTermToGroupCounts[term];
             int i = 0;
             foreach (string group in groupCounts.Keys)
@@ -527,7 +527,7 @@ namespace Lucene.Net.Search.Grouping
                 countValues[i] = GenerateRandomNonEmptyString();
             }
 
-            List<string> contentStrings = new List<string>();
+            JCG.List<string> contentStrings = new JCG.List<string>();
             IDictionary<string, IDictionary<string, ISet<string>>> searchTermToGroupCounts = new JCG.Dictionary<string, IDictionary<string, ISet<string>>>();
             for (int i = 1; i <= numDocs; i++)
             {
diff --git a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
index 4c4f0d6..84e7b0c 100644
--- a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
+++ b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
@@ -507,12 +507,12 @@ namespace Lucene.Net.Search.Grouping
                 Console.WriteLine("TEST: numDocs=" + numDocs + " numGroups=" + numGroups);
             }
 
-            List<string> groups = new List<string>();
+            JCG.List<string> groups = new JCG.List<string>();
             for (int i = 0; i < numGroups; i++)
             {
                 groups.Add(GenerateRandomNonEmptyString());
             }
-            List<string> facetValues = new List<string>();
+            JCG.List<string> facetValues = new JCG.List<string>();
             for (int i = 0; i < numFacets; i++)
             {
                 facetValues.Add(GenerateRandomNonEmptyString());
@@ -633,7 +633,7 @@ namespace Lucene.Net.Search.Grouping
                     searchTermToFacetToGroups[contentStr] = facetToGroups = new JCG.Dictionary<string, ISet<string>>();
                 }
 
-                List<string> facetVals = new List<string>();
+                JCG.List<string> facetVals = new JCG.List<string>();
                 if (useDv || random.nextInt(24) != 18)
                 {
                     if (useDv)
diff --git a/src/Lucene.Net.Tests.Grouping/GroupingSearchTest.cs b/src/Lucene.Net.Tests.Grouping/GroupingSearchTest.cs
index 524f58d..b6994f9 100644
--- a/src/Lucene.Net.Tests.Grouping/GroupingSearchTest.cs
+++ b/src/Lucene.Net.Tests.Grouping/GroupingSearchTest.cs
@@ -10,8 +10,7 @@ using Lucene.Net.Util.Mutable;
 using NUnit.Framework;
 using System;
 using System.Collections;
-using System.Collections.Generic;
-using System.Reflection;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Grouping
 {
@@ -51,7 +50,7 @@ namespace Lucene.Net.Search.Grouping
                 NewIndexWriterConfig(TEST_VERSION_CURRENT,
                     new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
             bool canUseIDV = !"Lucene3x".Equals(w.IndexWriter.Config.Codec.Name, StringComparison.Ordinal);
-            List<Document> documents = new List<Document>();
+            JCG.List<Document> documents = new JCG.List<Document>();
             // 0
             Document doc = new Document();
             AddGroupField(doc, groupField, "author1", canUseIDV);
diff --git a/src/Lucene.Net.Tests.Grouping/TestGrouping.cs b/src/Lucene.Net.Tests.Grouping/TestGrouping.cs
index d0226b6..edfdee9 100644
--- a/src/Lucene.Net.Tests.Grouping/TestGrouping.cs
+++ b/src/Lucene.Net.Tests.Grouping/TestGrouping.cs
@@ -258,7 +258,7 @@ namespace Lucene.Net.Search.Grouping
             else
             {
                 ValueSource vs = new BytesRefFieldSource(groupField);
-                List<SearchGroup<MutableValue>> mvalSearchGroups = new List<SearchGroup<MutableValue>>(searchGroups.size());
+                JCG.List<SearchGroup<MutableValue>> mvalSearchGroups = new JCG.List<SearchGroup<MutableValue>>(searchGroups.size());
                 foreach (SearchGroup<BytesRef> mergedTopGroup in searchGroups)
                 {
                     SearchGroup<MutableValue> sg = new SearchGroup<MutableValue>();
@@ -348,7 +348,7 @@ namespace Lucene.Net.Search.Grouping
                     return null;
                 }
 
-                List<SearchGroup<BytesRef>> groups = new List<SearchGroup<BytesRef>>(mutableValueGroups.Count());
+                JCG.List<SearchGroup<BytesRef>> groups = new JCG.List<SearchGroup<BytesRef>>(mutableValueGroups.Count());
                 foreach (var mutableValueGroup in mutableValueGroups)
                 {
                     SearchGroup<BytesRef> sg = new SearchGroup<BytesRef>();
@@ -371,7 +371,7 @@ namespace Lucene.Net.Search.Grouping
             else if (c.GetType().IsAssignableFrom(typeof(FunctionSecondPassGroupingCollector<MutableValue>)))        // LUCENENET Specific type for generic must be specified.
             {
                 ITopGroups<MutableValue> mvalTopGroups = ((FunctionSecondPassGroupingCollector<MutableValue>)c).GetTopGroups(withinGroupOffset);        // LUCENENET Specific type for generic must be specified.
-                List<GroupDocs<BytesRef>> groups = new List<GroupDocs<BytesRef>>(mvalTopGroups.Groups.Length);
+                JCG.List<GroupDocs<BytesRef>> groups = new JCG.List<GroupDocs<BytesRef>>(mvalTopGroups.Groups.Length);
                 foreach (GroupDocs<MutableValue> mvalGd in mvalTopGroups.Groups)
                 {
                     BytesRef groupValue = mvalGd.GroupValue.Exists ? ((MutableValueStr)mvalGd.GroupValue).Value : null;
@@ -406,7 +406,7 @@ namespace Lucene.Net.Search.Grouping
 
         private Sort GetRandomSort()
         {
-            List<SortField> sortFields = new List<SortField>();
+            JCG.List<SortField> sortFields = new JCG.List<SortField>();
             if (Random.nextInt(7) == 2)
             {
                 sortFields.Add(SortField.FIELD_SCORE);
@@ -550,9 +550,9 @@ namespace Lucene.Net.Search.Grouping
             //Arrays.Sort(groupDocs, groupSortComp);
             ArrayUtil.TimSort(groupDocs, groupSortComp);
             
-            IDictionary<BytesRef, List<GroupDoc>> groups = new JCG.Dictionary<BytesRef, List<GroupDoc>>();
-            List<BytesRef> sortedGroups = new List<BytesRef>();
-            List<IComparable[]> sortedGroupFields = new List<IComparable[]>();
+            IDictionary<BytesRef, IList<GroupDoc>> groups = new JCG.Dictionary<BytesRef, IList<GroupDoc>>();
+            IList<BytesRef> sortedGroups = new JCG.List<BytesRef>();
+            IList<IComparable[]> sortedGroupFields = new JCG.List<IComparable[]>();
 
             int totalHitCount = 0;
             ISet<BytesRef> knownGroups = new JCG.HashSet<BytesRef>();
@@ -577,7 +577,7 @@ namespace Lucene.Net.Search.Grouping
                     }
                 }
 
-                if (!groups.TryGetValue(d.group, out List<GroupDoc> l) || l == null)
+                if (!groups.TryGetValue(d.group, out IList<GroupDoc> l) || l == null)
                 {
                     //Console.WriteLine("    add sortedGroup=" + groupToString(d.group));
                     sortedGroups.Add(d.group);
@@ -585,7 +585,7 @@ namespace Lucene.Net.Search.Grouping
                     {
                         sortedGroupFields.Add(FillFields(d, groupSort));
                     }
-                    l = new List<GroupDoc>();
+                    l = new JCG.List<GroupDoc>();
                     groups.Put(d.group, l);
                 }
                 l.Add(d);
@@ -606,7 +606,7 @@ namespace Lucene.Net.Search.Grouping
             for (int idx = groupOffset; idx < limit; idx++)
             {
                 BytesRef group = sortedGroups[idx];
-                List<GroupDoc> docs = groups[group];
+                IList<GroupDoc> docs = groups[group];
                 totalGroupedHitCount += docs.size();
 
                 // LUCENENET specific: The original API Collections.Sort does not currently exist.
@@ -668,15 +668,15 @@ namespace Lucene.Net.Search.Grouping
         {
             // Coalesce by group, but in random order:
             groupDocs.Shuffle(Random);
-            IDictionary<BytesRef, List<GroupDoc>> groupMap = new JCG.Dictionary<BytesRef, List<GroupDoc>>();
-            List<BytesRef> groupValues = new List<BytesRef>();
+            IDictionary<BytesRef, IList<GroupDoc>> groupMap = new JCG.Dictionary<BytesRef, IList<GroupDoc>>();
+            IList<BytesRef> groupValues = new JCG.List<BytesRef>();
 
             foreach (GroupDoc groupDoc in groupDocs)
             {
-                if (!groupMap.TryGetValue(groupDoc.group, out List<GroupDoc> docs))
+                if (!groupMap.TryGetValue(groupDoc.group, out IList<GroupDoc> docs))
                 {
                     groupValues.Add(groupDoc.group);
-                    groupMap[groupDoc.group] = docs = new List<GroupDoc>();
+                    groupMap[groupDoc.group] = docs = new JCG.List<GroupDoc>();
                 }
                 docs.Add(groupDoc);
             }
@@ -687,7 +687,7 @@ namespace Lucene.Net.Search.Grouping
                                                         NewIndexWriterConfig(TEST_VERSION_CURRENT,
                                                                              new MockAnalyzer(Random)));
 
-            List<List<Document>> updateDocs = new List<List<Document>>();
+            IList<IList<Document>> updateDocs = new JCG.List<IList<Document>>();
 
             FieldType groupEndType = new FieldType(StringField.TYPE_NOT_STORED);
             groupEndType.IndexOptions = (IndexOptions.DOCS_ONLY);
@@ -696,7 +696,7 @@ namespace Lucene.Net.Search.Grouping
             //Console.WriteLine("TEST: index groups");
             foreach (BytesRef group in groupValues)
             {
-                List<Document> docs = new List<Document>();
+                IList<Document> docs = new JCG.List<Document>();
                 //Console.WriteLine("TEST:   group=" + (group == null ? "null" : group.utf8ToString()));
                 foreach (GroupDoc groupValue in groupMap[group])
                 {
@@ -723,7 +723,7 @@ namespace Lucene.Net.Search.Grouping
                 }
             }
 
-            foreach (List<Document> docs in updateDocs)
+            foreach (IList<Document> docs in updateDocs)
             {
                 // Just replaces docs w/ same docs:
                 w.UpdateDocuments(new Index.Term("group", docs[0].Get("group")), docs);
@@ -782,7 +782,7 @@ namespace Lucene.Net.Search.Grouping
                     Console.WriteLine("TEST: numDocs=" + numDocs + " numGroups=" + numGroups);
                 }
 
-                List<BytesRef> groups = new List<BytesRef>();
+                IList<BytesRef> groups = new JCG.List<BytesRef>();
                 for (int i = 0; i < numGroups; i++)
                 {
                     string randomValue;
@@ -1451,8 +1451,8 @@ namespace Lucene.Net.Search.Grouping
             }
             // Run 1st pass collector to get top groups per shard
             Weight w = topSearcher.CreateNormalizedWeight(query);
-            List<IEnumerable<ISearchGroup<BytesRef>>> shardGroups = new List<IEnumerable<ISearchGroup<BytesRef>>>();
-            List<IAbstractFirstPassGroupingCollector<object>> firstPassGroupingCollectors = new List<IAbstractFirstPassGroupingCollector<object>>();
+            IList<IEnumerable<ISearchGroup<BytesRef>>> shardGroups = new JCG.List<IEnumerable<ISearchGroup<BytesRef>>>();
+            IList<IAbstractFirstPassGroupingCollector<object>> firstPassGroupingCollectors = new JCG.List<IAbstractFirstPassGroupingCollector<object>>();
             IAbstractFirstPassGroupingCollector<object> firstPassCollector = null;
             bool shardsCanUseIDV;
             if (canUseIDV)
@@ -1650,12 +1650,12 @@ namespace Lucene.Net.Search.Grouping
 
         internal class ShardSearcher : IndexSearcher
         {
-            private readonly List<AtomicReaderContext> ctx;
+            private readonly IList<AtomicReaderContext> ctx;
 
             public ShardSearcher(AtomicReaderContext ctx, IndexReaderContext parent)
                             : base(parent)
             {
-                this.ctx = new List<AtomicReaderContext>(new AtomicReaderContext[] { ctx });
+                this.ctx = new JCG.List<AtomicReaderContext>(new AtomicReaderContext[] { ctx });
             }
 
             public void Search(Weight weight, ICollector collector)
diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
index c40b9e3..5865441 100644
--- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
@@ -1202,12 +1202,12 @@ namespace Lucene.Net.Search.Highlight
                 wTerms = new WeightedSpanTerm[2];
                 wTerms[0] = new WeightedSpanTerm(10f, "hello");
 
-                List<PositionSpan> positionSpans = new List<PositionSpan>();
+                IList<PositionSpan> positionSpans = new JCG.List<PositionSpan>();
                 positionSpans.Add(new PositionSpan(0, 0));
                 wTerms[0].AddPositionSpans(positionSpans);
 
                 wTerms[1] = new WeightedSpanTerm(1f, "kennedy");
-                positionSpans = new List<PositionSpan>();
+                positionSpans = new JCG.List<PositionSpan>();
                 positionSpans.Add(new PositionSpan(14, 14));
                 wTerms[1].AddPositionSpans(positionSpans);
 
@@ -1636,7 +1636,7 @@ namespace Lucene.Net.Search.Highlight
                 posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
                 offsetAtt = AddAttribute<IOffsetAttribute>();
 
-                lst = new List<Token>();
+                lst = new JCG.List<Token>();
                 Token t;
                 t = createToken("hi", 0, 2);
                 t.PositionIncrement = (1);
@@ -1657,7 +1657,7 @@ namespace Lucene.Net.Search.Highlight
             }
 
             IEnumerator<Token> iter;
-            internal List<Token> lst;
+            internal IList<Token> lst;
             private readonly ICharTermAttribute termAtt;
             private readonly IPositionIncrementAttribute posIncrAtt;
             private readonly IOffsetAttribute offsetAtt;
@@ -1698,7 +1698,7 @@ namespace Lucene.Net.Search.Highlight
                 posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
                 offsetAtt = AddAttribute<IOffsetAttribute>();
 
-                lst = new List<Token>();
+                lst = new JCG.List<Token>();
                 Token t;
                 t = createToken("hispeed", 0, 8);
                 t.PositionIncrement = (1);
@@ -1719,7 +1719,7 @@ namespace Lucene.Net.Search.Highlight
             }
 
             IEnumerator<Token> iter;
-            internal List<Token> lst;
+            internal IList<Token> lst;
             private readonly ICharTermAttribute termAtt;
             private readonly IPositionIncrementAttribute posIncrAtt;
             private readonly IOffsetAttribute offsetAtt;
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
index e454912..b1f7f20 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/AbstractTestCase.cs
@@ -9,6 +9,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using Directory = Lucene.Net.Store.Directory;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.VectorHighlight
 {
@@ -168,9 +169,9 @@ namespace Lucene.Net.Search.VectorHighlight
             }
         }
 
-        protected List<BytesRef> analyze(String text, String field, Analyzer analyzer)
+        protected IList<BytesRef> analyze(String text, String field, Analyzer analyzer)
         {
-            List<BytesRef> bytesRefs = new List<BytesRef>();
+            IList<BytesRef> bytesRefs = new JCG.List<BytesRef>();
 
             TokenStream tokenStream = analyzer.GetTokenStream(field, text);
             try
@@ -197,7 +198,7 @@ namespace Lucene.Net.Search.VectorHighlight
             return bytesRefs;
         }
 
-        protected PhraseQuery toPhraseQuery(List<BytesRef> bytesRefs, String field)
+        protected PhraseQuery toPhraseQuery(IList<BytesRef> bytesRefs, String field)
         {
             PhraseQuery phraseQuery = new PhraseQuery();
             foreach (BytesRef bytesRef in bytesRefs)
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldPhraseListTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldPhraseListTest.cs
index bc2e17a..f6e4f9b 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldPhraseListTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldPhraseListTest.cs
@@ -3,6 +3,7 @@ using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 using TermInfo = Lucene.Net.Search.VectorHighlight.FieldTermStack.TermInfo;
 using Toffs = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo.Toffs;
 using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo;
@@ -300,7 +301,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
         private WeightedPhraseInfo newInfo(int startOffset, int endOffset, float boost)
         {
-            List<TermInfo> infos = new List<TermInfo>();
+            IList<TermInfo> infos = new JCG.List<TermInfo>();
             infos.Add(new TermInfo(TestUtil.RandomUnicodeString(Random), startOffset, endOffset, 0, 0));
             return new WeightedPhraseInfo(infos, boost);
         }
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs
index 41d11fe..6974ac3 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs
@@ -884,7 +884,7 @@ namespace Lucene.Net.Search.VectorHighlight
             FieldQuery fq = new FieldQuery(query, true, true);
 
             // "a"
-            List<TermInfo> phraseCandidate = new List<TermInfo>();
+            IList<TermInfo> phraseCandidate = new JCG.List<TermInfo>();
             phraseCandidate.Add(new TermInfo("a", 0, 1, 0, 1));
             assertNull(fq.SearchPhrase(F, phraseCandidate));
             // "a b"
@@ -928,7 +928,7 @@ namespace Lucene.Net.Search.VectorHighlight
             FieldQuery fq = new FieldQuery(query, true, true);
 
             // "a b c" w/ position-gap = 2
-            List<TermInfo> phraseCandidate = new List<TermInfo>();
+            IList<TermInfo> phraseCandidate = new JCG.List<TermInfo>();
             phraseCandidate.Add(new TermInfo("a", 0, 1, 0, 1));
             phraseCandidate.Add(new TermInfo("b", 2, 3, 2, 1));
             phraseCandidate.Add(new TermInfo("c", 4, 5, 4, 1));
@@ -986,7 +986,7 @@ namespace Lucene.Net.Search.VectorHighlight
             QueryPhraseMap qpm = fq.GetFieldTermMap(F, "defg");
             assertNotNull(qpm);
             assertNull(fq.GetFieldTermMap(F, "dog"));
-            List<TermInfo> phraseCandidate = new List<TermInfo>();
+            IList<TermInfo> phraseCandidate = new JCG.List<TermInfo>();
             phraseCandidate.Add(new TermInfo("defg", 0, 12, 0, 1));
             assertNotNull(fq.SearchPhrase(F, phraseCandidate));
         }
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/SimpleFragmentsBuilderTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/SimpleFragmentsBuilderTest.cs
index f27698c..e171ddb 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/SimpleFragmentsBuilderTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/SimpleFragmentsBuilderTest.cs
@@ -265,8 +265,8 @@ namespace Lucene.Net.Search.VectorHighlight
             int numDocs = randomValues.Length * 5;
             int numFields = 2 + Random.nextInt(5);
             int numTerms = 2 + Random.nextInt(3);
-            List<Doc> docs = new List<Doc>(numDocs);
-            List<Document> documents = new List<Document>(numDocs);
+            IList<Doc> docs = new JCG.List<Doc>(numDocs);
+            IList<Document> documents = new JCG.List<Document>(numDocs);
             IDictionary<String, ISet<int>> valueToDocId = new JCG.Dictionary<String, ISet<int>>();
             for (int i = 0; i < numDocs; i++)
             {
@@ -301,7 +301,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
                     String queryTerm = randomValues[Random.nextInt(randomValues.Length)];
                     int randomHit = valueToDocId[queryTerm].First();
-                    List<StringBuilder> builders = new List<StringBuilder>();
+                    IList<StringBuilder> builders = new JCG.List<StringBuilder>();
                     foreach (String[] fieldValues in docs[randomHit].fieldValues)
                     {
                         StringBuilder builder = new StringBuilder();
diff --git a/src/Lucene.Net.Tests.Join/TestBlockJoin.cs b/src/Lucene.Net.Tests.Join/TestBlockJoin.cs
index f04cecf..e18a9b2 100644
--- a/src/Lucene.Net.Tests.Join/TestBlockJoin.cs
+++ b/src/Lucene.Net.Tests.Join/TestBlockJoin.cs
@@ -15,6 +15,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
 
 namespace Lucene.Net.Search.Join
@@ -76,7 +77,7 @@ namespace Lucene.Net.Search.Join
             // we don't want to merge - since we rely on certain segment setup
             IndexWriter w = new IndexWriter(dir, config);
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             docs.Add(MakeJob("java", 2007));
             docs.Add(MakeJob("python", 2010));
@@ -139,7 +140,7 @@ namespace Lucene.Net.Search.Join
 #endif
                 Random, dir);
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             docs.Add(MakeJob("java", 2007));
             docs.Add(MakeJob("python", 2010));
@@ -233,7 +234,7 @@ namespace Lucene.Net.Search.Join
 #endif
                 Random, dir);
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             for (int i = 0; i < 10; i++)
             {
@@ -294,13 +295,13 @@ namespace Lucene.Net.Search.Join
 #endif
                 Random, dir);
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             docs.Add(MakeJob("java", 2007));
             docs.Add(MakeJob("python", 2010));
             docs.Shuffle(Random);
             docs.Add(MakeResume("Lisa", "United Kingdom"));
 
-            IList<Document> docs2 = new List<Document>();
+            IList<Document> docs2 = new JCG.List<Document>();
             docs2.Add(MakeJob("ruby", 2005));
             docs2.Add(MakeJob("java", 2006));
             docs2.Shuffle(Random);
@@ -424,7 +425,7 @@ namespace Lucene.Net.Search.Join
             // Cannot assert this since we use NoMergePolicy:
             w.DoRandomForceMergeAssert = false;
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             docs.Add(MakeJob("java", 2007));
             docs.Add(MakeJob("python", 2010));
             docs.Add(MakeResume("Lisa", "United Kingdom"));
@@ -507,7 +508,7 @@ namespace Lucene.Net.Search.Join
 
         private Sort GetRandomSort(string prefix, int numFields)
         {
-            List<SortField> sortFields = new List<SortField>();
+            JCG.List<SortField> sortFields = new JCG.List<SortField>();
             // TODO: sometimes sort by score; problem is scores are
             // not comparable across the two indices
             // sortFields.Add(SortField.FIELD_SCORE);
@@ -544,7 +545,7 @@ namespace Lucene.Net.Search.Join
             string[][] childFields = GetRandomFields(numParentDocs);
 
             bool doDeletes = Random.NextBoolean();
-            IList<int> toDelete = new List<int>();
+            IList<int> toDelete = new JCG.List<int>();
 
             // TODO: parallel star join, nested join cases too!
             RandomIndexWriter w = new RandomIndexWriter(
@@ -581,7 +582,7 @@ namespace Lucene.Net.Search.Join
                     parentJoinDoc.Add(NewStringField("blockID", "" + parentDocID, Field.Store.NO));
                 }
 
-                IList<Document> joinDocs = new List<Document>();
+                IList<Document> joinDocs = new JCG.List<Document>();
 
                 if (Verbose)
                 {
@@ -811,7 +812,7 @@ namespace Lucene.Net.Search.Join
                 }
 
                 // Merge both sorts:
-                List<SortField> sortFields = new List<SortField>(parentSort.GetSort());
+                IList<SortField> sortFields = new JCG.List<SortField>(parentSort.GetSort());
                 sortFields.AddRange(childSort.GetSort());
                 Sort parentAndChildSort = new Sort(sortFields.ToArray());
 
@@ -1169,7 +1170,7 @@ namespace Lucene.Net.Search.Join
 #endif
                 Random, dir);
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             docs.Add(MakeJob("java", 2007));
             docs.Add(MakeJob("python", 2010));
@@ -1322,7 +1323,7 @@ namespace Lucene.Net.Search.Join
 #endif
                 Random, dir);
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             docs.Add(MakeJob("ruby", 2005));
             docs.Add(MakeJob("java", 2006));
             docs.Add(MakeJob("java", 2010));
@@ -1424,7 +1425,7 @@ namespace Lucene.Net.Search.Join
             parent.Add(NewTextField("parentText", "text", Field.Store.NO));
             parent.Add(NewStringField("isParent", "yes", Field.Store.NO));
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             Document child = new Document();
             docs.Add(child);
@@ -1493,7 +1494,7 @@ namespace Lucene.Net.Search.Join
             parent.Add(NewTextField("parentText", "text", Field.Store.NO));
             parent.Add(NewStringField("isParent", "yes", Field.Store.NO));
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             Document child = new Document();
             docs.Add(child);
@@ -1563,7 +1564,7 @@ namespace Lucene.Net.Search.Join
             parent.Add(NewTextField("parentText", "text", Field.Store.NO));
             parent.Add(NewStringField("isParent", "yes", Field.Store.NO));
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
 
             Document child = new Document();
             docs.Add(child);
diff --git a/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs b/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs
index fc1258c..e646415 100644
--- a/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs
+++ b/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs
@@ -8,6 +8,7 @@ using Lucene.Net.Util;
 using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Join
 {
@@ -37,7 +38,7 @@ namespace Lucene.Net.Search.Join
             RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT,
                 new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             Document document = new Document();
             document.Add(new StringField("field2", "a", Field.Store.NO));
             document.Add(new StringField("filter_1", "T", Field.Store.NO));
diff --git a/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs b/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs
index 40ae57f..8972686 100644
--- a/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs
+++ b/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs
@@ -9,6 +9,7 @@ using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Join
 {
@@ -158,12 +159,12 @@ namespace Lucene.Net.Search.Join
 
         private IList<Document> CreateDocsForSegment(int segmentNumber)
         {
-            IList<IList<Document>> blocks = new List<IList<Document>>(AMOUNT_OF_PARENT_DOCS);
+            IList<IList<Document>> blocks = new JCG.List<IList<Document>>(AMOUNT_OF_PARENT_DOCS);
             for (int i = 0; i < AMOUNT_OF_PARENT_DOCS; i++)
             {
                 blocks.Add(CreateParentDocWithChildren(segmentNumber, i));
             }
-            IList<Document> result = new List<Document>(AMOUNT_OF_DOCS_IN_SEGMENT);
+            IList<Document> result = new JCG.List<Document>(AMOUNT_OF_DOCS_IN_SEGMENT);
             foreach (IList<Document> block in blocks)
             {
                 result.AddRange(block);
@@ -173,7 +174,7 @@ namespace Lucene.Net.Search.Join
 
         private IList<Document> CreateParentDocWithChildren(int segmentNumber, int parentNumber)
         {
-            IList<Document> result = new List<Document>(AMOUNT_OF_CHILD_DOCS + 1);
+            IList<Document> result = new JCG.List<Document>(AMOUNT_OF_CHILD_DOCS + 1);
             for (int i = 0; i < AMOUNT_OF_CHILD_DOCS; i++)
             {
                 result.Add(CreateChildDoc(segmentNumber, parentNumber, i));
diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
index 807f090..104bcb5 100644
--- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
+++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
@@ -603,11 +603,11 @@ namespace Lucene.Net.Search.Join
                     {
                         if (!context.FromDocuments.TryGetValue(linkValue, out IList<RandomDoc> fromDocs))
                         {
-                            context.FromDocuments[linkValue] = fromDocs = new List<RandomDoc>();
+                            context.FromDocuments[linkValue] = fromDocs = new JCG.List<RandomDoc>();
                         }
                         if (!context.RandomValueFromDocs.TryGetValue(value, out IList<RandomDoc> randomValueFromDocs))
                         {
-                            context.RandomValueFromDocs[value] = randomValueFromDocs = new List<RandomDoc>();
+                            context.RandomValueFromDocs[value] = randomValueFromDocs = new JCG.List<RandomDoc>();
                         }
 
                         fromDocs.Add(docs[i]);
@@ -618,11 +618,11 @@ namespace Lucene.Net.Search.Join
                     {
                         if (!context.ToDocuments.TryGetValue(linkValue, out IList<RandomDoc> toDocuments))
                         {
-                            context.ToDocuments[linkValue] = toDocuments = new List<RandomDoc>();
+                            context.ToDocuments[linkValue] = toDocuments = new JCG.List<RandomDoc>();
                         }
                         if (!context.RandomValueToDocs.TryGetValue(value, out IList<RandomDoc> randomValueToDocs))
                         {
-                            context.RandomValueToDocs[value] = randomValueToDocs = new List<RandomDoc>();
+                            context.RandomValueToDocs[value] = randomValueToDocs = new JCG.List<RandomDoc>();
                         }
 
                         toDocuments.Add(docs[i]);
@@ -943,7 +943,7 @@ namespace Lucene.Net.Search.Join
                 ? context.FromHitsToJoinScore[queryValue]
                 : context.ToHitsToJoinScore[queryValue];
 
-            var hits = new List<KeyValuePair<int, JoinScore>>(hitsToJoinScores);
+            var hits = new JCG.List<KeyValuePair<int, JoinScore>>(hitsToJoinScores);
             hits.Sort(Comparer< KeyValuePair<int, JoinScore>>.Create( (hit1, hit2) =>
             {
                 float score1 = hit1.Value.Score(scoreMode);
@@ -1041,7 +1041,7 @@ namespace Lucene.Net.Search.Join
             {
                 this.id = id;
                 this.@from = from;
-                linkValues = new List<string>(numberOfLinkValues);
+                linkValues = new JCG.List<string>(numberOfLinkValues);
                 this.value = value;
             }
         }
diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/IndexSortingTest.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/IndexSortingTest.cs
index 64cd63a..6b2de06 100644
--- a/src/Lucene.Net.Tests.Misc/Index/Sorter/IndexSortingTest.cs
+++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/IndexSortingTest.cs
@@ -5,6 +5,7 @@ using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
 
 namespace Lucene.Net.Index.Sorter
@@ -42,7 +43,7 @@ namespace Lucene.Net.Index.Sorter
             // only read the values of the undeleted documents, since after addIndexes,
             // the deleted ones will be dropped from the index.
             IBits liveDocs = reader.LiveDocs;
-            List<int> values = new List<int>();
+            JCG.List<int> values = new JCG.List<int>();
             for (int i = 0; i < reader.MaxDoc; i++)
             {
                 if (liveDocs == null || liveDocs.Get(i))
diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
index c19f794..56ac516 100644
--- a/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
+++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
@@ -13,6 +13,7 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index.Sorter
 {
@@ -167,7 +168,7 @@ namespace Lucene.Net.Index.Sorter
         /** Creates an index for sorting. */
         public void CreateIndex(Directory dir, int numDocs, Random random)
         {
-            IList<int> ids = new List<int>();
+            IList<int> ids = new JCG.List<int>();
             for (int i = 0; i < numDocs; i++)
             {
                 ids.Add(i * 10);
diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/TestBlockJoinSorter.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/TestBlockJoinSorter.cs
index 86d94f5..914d914 100644
--- a/src/Lucene.Net.Tests.Misc/Index/Sorter/TestBlockJoinSorter.cs
+++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/TestBlockJoinSorter.cs
@@ -1,4 +1,4 @@
-/*
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -27,6 +27,7 @@ using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index.Sorter
 {
@@ -68,7 +69,7 @@ namespace Lucene.Net.Index.Sorter
                 parentDoc.Add(parent);
                 for (int i = 0; i < numParents; ++i)
                 {
-                    List<Document> documents = new List<Document>();
+                    IList<Document> documents = new JCG.List<Document>();
                     int numChildren = Random.nextInt(10);
                     for (int j = 0; j < numChildren; ++j)
                     {
diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/TestEarlyTermination.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/TestEarlyTermination.cs
index 19c6032..9acd9f9 100644
--- a/src/Lucene.Net.Tests.Misc/Index/Sorter/TestEarlyTermination.cs
+++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/TestEarlyTermination.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Index.Sorter
     public class TestEarlyTermination : LuceneTestCase
     {
         private int numDocs;
-        private List<string> terms;
+        private IList<string> terms;
         private Directory dir;
         private Sort sort;
         private RandomIndexWriter iw;
@@ -64,7 +64,7 @@ namespace Lucene.Net.Index.Sorter
             {
                 randomTerms.add(TestUtil.RandomSimpleString(Random));
             }
-            terms = new List<string>(randomTerms);
+            terms = new JCG.List<string>(randomTerms);
             int seed = Random.Next();
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed)));
             iwc.SetMergePolicy(TestSortingMergePolicy.NewSortingMergePolicy(sort));
diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/TestSortingMergePolicy.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/TestSortingMergePolicy.cs
index 5806efb..97d9648 100644
--- a/src/Lucene.Net.Tests.Misc/Index/Sorter/TestSortingMergePolicy.cs
+++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/TestSortingMergePolicy.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Index.Sorter
     [SuppressCodecs("Lucene3x")]
     public class TestSortingMergePolicy : LuceneTestCase
     {
-        private List<string> terms;
+        private IList<string> terms;
         private Directory dir1, dir2;
         private Sort sort;
         private IndexReader reader;
@@ -87,7 +87,7 @@ namespace Lucene.Net.Index.Sorter
             {
                 randomTerms.add(TestUtil.RandomSimpleString(Random));
             }
-            terms = new List<string>(randomTerms);
+            terms = new JCG.List<string>(randomTerms);
             long seed = Random.NextInt64();
             IndexWriterConfig iwc1 = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random((int)seed)));
             IndexWriterConfig iwc2 = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random((int)seed)));
diff --git a/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs b/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs
index c47aad0..6ff07d9 100644
--- a/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs
+++ b/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs
@@ -473,8 +473,8 @@ namespace Lucene.Net.Tests.Queries
                 int lowFreq = lowFreqQueue.Top.freq;
                 int highFreq = highFreqQueue.Top.freq;
                 AssumeTrue(@"unlucky index", highFreq - 1 > lowFreq);
-                List<TermAndFreq> highTerms = QueueToList(highFreqQueue);
-                List<TermAndFreq> lowTerms = QueueToList(lowFreqQueue);
+                IList<TermAndFreq> highTerms = QueueToList(highFreqQueue);
+                IList<TermAndFreq> lowTerms = QueueToList(lowFreqQueue);
 
                 IndexSearcher searcher = NewSearcher(reader);
                 Occur lowFreqOccur = RandomOccur(Random);
@@ -559,9 +559,9 @@ namespace Lucene.Net.Tests.Queries
             }
         }
 
-        private static List<TermAndFreq> QueueToList(Util.PriorityQueue<TermAndFreq> queue)
+        private static IList<TermAndFreq> QueueToList(Util.PriorityQueue<TermAndFreq> queue)
         {
-            var terms = new List<TermAndFreq>();
+            var terms = new JCG.List<TermAndFreq>();
             while (queue.Count > 0)
             {
                 terms.Add(queue.Pop());
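
[Editor's aside: the change above also illustrates the declaration pattern this sweep applies throughout the test projects -- locals, fields, and return types are widened to IList<T>, so only the construction site names the concrete J2N list. A small sketch of that pattern; the helper name and values below are made up for the example.]

    // Illustrative only: expose the interface, construct the J2N list.
    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class ListSweepSketch
    {
        // Before the sweep a member like this was typically typed as the concrete
        // System.Collections.Generic.List<T>; afterwards only the "new" names a type.
        public static IList<string> CopyTerms(IEnumerable<string> source)
        {
            IList<string> terms = new JCG.List<string>(source); // copy-constructor, as used above
            terms.Add("extra");                                 // callers keep working against IList<T>
            return terms;
        }
    }
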
diff --git a/src/Lucene.Net.Tests.Queries/TermFilterTest.cs b/src/Lucene.Net.Tests.Queries/TermFilterTest.cs
index 2c8d670..d7905d3 100644
--- a/src/Lucene.Net.Tests.Queries/TermFilterTest.cs
+++ b/src/Lucene.Net.Tests.Queries/TermFilterTest.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Tests.Queries
 #endif
                 Random, dir);
             int num = AtLeast(100);
-            var terms = new List<Term>();
+            var terms = new JCG.List<Term>();
             for (int i = 0; i < num; i++)
             {
                 string field = @"field" + i;
diff --git a/src/Lucene.Net.Tests.Queries/TermsFilterTest.cs b/src/Lucene.Net.Tests.Queries/TermsFilterTest.cs
index f48a3a7..522345a 100644
--- a/src/Lucene.Net.Tests.Queries/TermsFilterTest.cs
+++ b/src/Lucene.Net.Tests.Queries/TermsFilterTest.cs
@@ -71,7 +71,7 @@ namespace Lucene.Net.Tests.Queries
             AtomicReaderContext context = (AtomicReaderContext)reader.Context;
             w.Dispose();
 
-            IList<Term> terms = new List<Term>();
+            IList<Term> terms = new JCG.List<Term>();
             terms.Add(new Term(fieldName, "19"));
             FixedBitSet bits = (FixedBitSet)TermsFilter(Random.NextBoolean(), terms).GetDocIdSet(context, context.AtomicReader.LiveDocs);
             assertNull("Must match nothing", bits);
@@ -154,7 +154,7 @@ namespace Lucene.Net.Tests.Queries
                 Random, dir);
             int num = AtLeast(3);
             int skip = Random.Next(num);
-            var terms = new List<Term>();
+            var terms = new JCG.List<Term>();
             for (int i = 0; i < num; i++)
             {
                 terms.Add(new Term("field" + i, "content1"));
@@ -294,7 +294,7 @@ namespace Lucene.Net.Tests.Queries
                 return new TermsFilter(termList.ToList());
             }
             TermsFilter filter;
-            var bytes = new List<BytesRef>();
+            var bytes = new JCG.List<BytesRef>();
             string field = null;
             foreach (Term term in termList)
             {
@@ -315,7 +315,7 @@ namespace Lucene.Net.Tests.Queries
         {
             int num = AtLeast(100);
             bool singleField = Random.NextBoolean();
-            IList<Term> terms = new List<Term>();
+            IList<Term> terms = new JCG.List<Term>();
             var uniqueTerms = new JCG.HashSet<Term>();
             for (int i = 0; i < num; i++)
             {
@@ -330,7 +330,7 @@ namespace Lucene.Net.Tests.Queries
                 assertEquals(right.GetHashCode(), left.GetHashCode());
                 if (uniqueTerms.Count > 1)
                 {
-                    IList<Term> asList = new List<Term>(uniqueTerms);
+                    IList<Term> asList = new JCG.List<Term>(uniqueTerms);
                     asList.RemoveAt(0);
                     TermsFilter notEqual = TermsFilter(singleField && Random.NextBoolean(), asList);
                     assertFalse(left.Equals(notEqual));
diff --git a/src/Lucene.Net.Tests.Spatial/DistanceStrategyTest.cs b/src/Lucene.Net.Tests.Spatial/DistanceStrategyTest.cs
index 9795238..0427f38 100644
--- a/src/Lucene.Net.Tests.Spatial/DistanceStrategyTest.cs
+++ b/src/Lucene.Net.Tests.Spatial/DistanceStrategyTest.cs
@@ -9,6 +9,7 @@ using Spatial4n.Core.Context;
 using Spatial4n.Core.Shapes;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial
 {
@@ -34,7 +35,7 @@ namespace Lucene.Net.Spatial
         //@ParametersFactory
         public static IList<Object[]> Parameters()
         {
-            List<Object[]> ctorArgs = new List<object[]>();
+            IList<Object[]> ctorArgs = new JCG.List<object[]>();
 
             SpatialContext ctx = SpatialContext.GEO;
             SpatialPrefixTree grid;
diff --git a/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs b/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
index e169996..5ee7789 100644
--- a/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
+++ b/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Spatial
         //@ParametersFactory
         public static IList<Object[]> Parameters()
         {
-            List<Object[]> ctorArgs = new List<object[]>();
+            IList<Object[]> ctorArgs = new JCG.List<object[]>();
 
             SpatialContext ctx = SpatialContext.GEO;
             SpatialPrefixTree grid;
diff --git a/src/Lucene.Net.Tests.Spatial/Prefix/SpatialOpRecursivePrefixTreeTest.cs b/src/Lucene.Net.Tests.Spatial/Prefix/SpatialOpRecursivePrefixTreeTest.cs
index 13ea7ab..4f78f7b 100644
--- a/src/Lucene.Net.Tests.Spatial/Prefix/SpatialOpRecursivePrefixTreeTest.cs
+++ b/src/Lucene.Net.Tests.Spatial/Prefix/SpatialOpRecursivePrefixTreeTest.cs
@@ -185,13 +185,13 @@ namespace Lucene.Net.Spatial.Prefix
                 IList<IShape> shapes;
                 if (shape is ShapePair)
                 {
-                    shapes = new List<IShape>(2);
+                    shapes = new JCG.List<IShape>(2);
                     shapes.Add(((ShapePair)shape).shape1);
                     shapes.Add(((ShapePair)shape).shape2);
                 }
                 else
                 {
-                    shapes = new List<IShape>(new IShape[] { shape });//Collections.Singleton(shape);
+                    shapes = new JCG.List<IShape>(new IShape[] { shape });//Collections.Singleton(shape);
                 }
                 foreach (IShape shapei in shapes)
                 {
@@ -414,7 +414,7 @@ namespace Lucene.Net.Spatial.Prefix
             IList<Cell> cells = grid.GetCells(snapMe, detailLevel, false, true);
 
             //calc bounding box of cells.
-            List<IShape> cellShapes = new List<IShape>(cells.size());
+            IList<IShape> cellShapes = new JCG.List<IShape>(cells.size());
             foreach (Cell cell in cells)
             {
                 cellShapes.Add(cell.Shape);
@@ -437,7 +437,7 @@ namespace Lucene.Net.Spatial.Prefix
             internal bool biasContainsThenWithin;//a hack
 
             public ShapePair(IShape shape1, IShape shape2, bool containsThenWithin, SpatialContext ctx)
-                        : base(new List<IShape> { shape1, shape2 }, ctx)
+                        : base(new JCG.List<IShape> { shape1, shape2 }, ctx)
             {
                 this.ctx = ctx;
 
diff --git a/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs b/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs
index d0c667c..fc33858 100644
--- a/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs
+++ b/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs
@@ -9,6 +9,7 @@ using Spatial4n.Core.Context;
 using Spatial4n.Core.Shapes;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial
 {
@@ -40,7 +41,7 @@ namespace Lucene.Net.Spatial
             SpatialPrefixTree gridQuad = new QuadPrefixTree(ctx, 10);
             SpatialPrefixTree gridGeohash = new GeohashPrefixTree(ctx, 10);
 
-            List<SpatialStrategy> strategies = new List<SpatialStrategy>();
+            IList<SpatialStrategy> strategies = new JCG.List<SpatialStrategy>();
             strategies.Add(new RecursivePrefixTreeStrategy(gridGeohash, "recursive_geohash"));
             strategies.Add(new TermQueryPrefixTreeStrategy(gridQuad, "termquery_quad"));
             strategies.Add(new PointVectorStrategy(ctx, "pointvector"));
diff --git a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs
index 688c0c8..383e6d5 100644
--- a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs
+++ b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs
@@ -10,8 +10,8 @@ using Spatial4n.Core.Context;
 using Spatial4n.Core.Shapes;
 using System;
 using System.Collections.Generic;
-using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial
 {
@@ -115,7 +115,7 @@ namespace Lucene.Net.Spatial
             {
                 TopDocs topDocs = indexSearcher.Search(query, numDocs);
 
-                List<SearchResult> results = new List<SearchResult>();
+                IList<SearchResult> results = new JCG.List<SearchResult>();
                 foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
                 {
                     results.Add(new SearchResult(scoreDoc.Score, indexSearcher.Doc(scoreDoc.Doc)));
@@ -196,9 +196,9 @@ namespace Lucene.Net.Spatial
         {
 
             public int numFound;
-            public List<SearchResult> results;
+            public IList<SearchResult> results;
 
-            public SearchResults(int numFound, List<SearchResult> results)
+            public SearchResults(int numFound, IList<SearchResult> results)
             {
                 this.numFound = numFound;
                 this.results = results;
diff --git a/src/Lucene.Net.Tests.Spatial/SpatialTestData.cs b/src/Lucene.Net.Tests.Spatial/SpatialTestData.cs
index 3d0f9e0..671ac72 100644
--- a/src/Lucene.Net.Tests.Spatial/SpatialTestData.cs
+++ b/src/Lucene.Net.Tests.Spatial/SpatialTestData.cs
@@ -5,6 +5,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial
 {
@@ -41,7 +42,7 @@ namespace Lucene.Net.Spatial
          */
         public static IEnumerator<SpatialTestData> GetTestData(Stream @in, SpatialContext ctx)
         {
-            List<SpatialTestData> results = new List<SpatialTestData>();
+            IList<SpatialTestData> results = new JCG.List<SpatialTestData>();
             TextReader bufInput = new StreamReader(@in, Encoding.UTF8);
             try
             {
diff --git a/src/Lucene.Net.Tests.Spatial/SpatialTestQuery.cs b/src/Lucene.Net.Tests.Spatial/SpatialTestQuery.cs
index 7494aa7..8a33fba 100644
--- a/src/Lucene.Net.Tests.Spatial/SpatialTestQuery.cs
+++ b/src/Lucene.Net.Tests.Spatial/SpatialTestQuery.cs
@@ -5,6 +5,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial
 {
@@ -34,7 +35,7 @@ namespace Lucene.Net.Spatial
         public string line;
         public int lineNumber = -1;
         public SpatialArgs args;
-        public List<string> ids = new List<string>();
+        public IList<string> ids = new JCG.List<string>();
 
         /**
          * Get Test Queries.  The InputStream is closed.
@@ -46,7 +47,7 @@ namespace Lucene.Net.Spatial
             Stream @in)
         {
 
-            List<SpatialTestQuery> results = new List<SpatialTestQuery>();
+            IList<SpatialTestQuery> results = new JCG.List<SpatialTestQuery>();
 
             TextReader bufInput = new StreamReader(@in, Encoding.UTF8);
             try
diff --git a/src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs b/src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs
index 571d839..a18ba2b 100644
--- a/src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs
+++ b/src/Lucene.Net.Tests.Spatial/StrategyTestCase.cs
@@ -66,19 +66,19 @@ namespace Lucene.Net.Spatial
 
         protected virtual void getAddAndVerifyIndexedDocuments(String testDataFile)
         {
-            List<Document> testDocuments = getDocuments(testDataFile);
+            IList<Document> testDocuments = getDocuments(testDataFile);
             addDocumentsAndCommit(testDocuments);
             VerifyDocumentsIndexed(testDocuments.size());
         }
 
-        protected virtual List<Document> getDocuments(String testDataFile)
+        protected virtual IList<Document> getDocuments(String testDataFile)
         {
             return getDocuments(getSampleData(testDataFile));
         }
 
-        protected virtual List<Document> getDocuments(IEnumerator<SpatialTestData> sampleData)
+        protected virtual IList<Document> getDocuments(IEnumerator<SpatialTestData> sampleData)
         {
-            List<Document> documents = new List<Document>();
+            IList<Document> documents = new JCG.List<Document>();
             while (sampleData.MoveNext())
             {
                 SpatialTestData data = sampleData.Current;
@@ -182,7 +182,7 @@ namespace Lucene.Net.Spatial
                 }
                 else
                 {
-                    List<string> found = new List<string>();
+                    IList<string> found = new JCG.List<string>();
                     foreach (SearchResult r in got.results)
                     {
                         found.Add(r.document.Get("id"));
@@ -191,7 +191,7 @@ namespace Lucene.Net.Spatial
                     // sort both so that the order is not important
                     CollectionUtil.TimSort(q.ids);
                     CollectionUtil.TimSort(found);
-                    assertEquals(msg, q.ids.toString(), found.toString());
+                    assertEquals(msg, q.ids.ToString(), found.ToString());
                 }
             }
         }
@@ -268,7 +268,7 @@ namespace Lucene.Net.Spatial
 
             SpatialTestQuery testQuery = new SpatialTestQuery();
             testQuery.args = new SpatialArgs(operation, queryShape);
-            testQuery.ids = new List<string>(expectedIds);
+            testQuery.ids = new JCG.List<string>(expectedIds);
             runTestQuery(SpatialMatchConcern.FILTER, testQuery);
         }
     }
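
[Editor's aside: the assertEquals(msg, q.ids.ToString(), found.ToString()) line above leans on both lists now being JCG.List<string>, whose ToString is assumed here to render the elements Java-style (e.g. "[a, b, c]") rather than the type name, so two sorted lists with equal contents compare equal as strings. A minimal sketch of that assumption; the printed output in the comments is assumed, not verified against J2N.]

    // Sketch of the assumption behind the ToString() comparison above.
    using System;
    using JCG = J2N.Collections.Generic;

    internal static class ToStringSketch
    {
        public static void Main()
        {
            var expected = new JCG.List<string> { "doc1", "doc2" };
            var found = new JCG.List<string> { "doc1", "doc2" };

            // Assumed to print something like "[doc1, doc2]" for both lists,
            // which makes the string comparison equivalent to an element-wise one.
            Console.WriteLine(expected.ToString());
            Console.WriteLine(found.ToString());
            Console.WriteLine(expected.ToString() == found.ToString()); // expected: True
        }
    }
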
diff --git a/src/Lucene.Net.Tests.Spatial/TestTestFramework.cs b/src/Lucene.Net.Tests.Spatial/TestTestFramework.cs
index 67a4242..9c4c290 100644
--- a/src/Lucene.Net.Tests.Spatial/TestTestFramework.cs
+++ b/src/Lucene.Net.Tests.Spatial/TestTestFramework.cs
@@ -6,7 +6,7 @@ using Spatial4n.Core.Shapes;
 using System;
 using System.Collections.Generic;
 using System.IO;
-using System.Reflection;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Spatial
 {
@@ -38,7 +38,7 @@ namespace Lucene.Net.Spatial
             SpatialContext ctx = SpatialContext.GEO;
             IEnumerator<SpatialTestQuery> iter = SpatialTestQuery.GetTestQueries(
                 new SpatialArgsParser(), ctx, name, @in);//closes the InputStream
-            List<SpatialTestQuery> tests = new List<SpatialTestQuery>();
+            IList<SpatialTestQuery> tests = new JCG.List<SpatialTestQuery>();
             while (iter.MoveNext())
             {
                 tests.Add(iter.Current);
diff --git a/src/Lucene.Net.Tests.Suggest/Spell/TestWordBreakSpellChecker.cs b/src/Lucene.Net.Tests.Suggest/Spell/TestWordBreakSpellChecker.cs
index b8b27bf..03d4c70 100644
--- a/src/Lucene.Net.Tests.Suggest/Spell/TestWordBreakSpellChecker.cs
+++ b/src/Lucene.Net.Tests.Suggest/Spell/TestWordBreakSpellChecker.cs
@@ -8,6 +8,7 @@ using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Text.RegularExpressions;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Spell
 {
@@ -295,8 +296,8 @@ namespace Lucene.Net.Search.Spell
                     Random, dir, new MockAnalyzer(Random,
                     MockTokenizer.WHITESPACE, false));
                 int maxLength = TestUtil.NextInt32(Random, 5, 50);
-                List<string> originals = new List<string>(numDocs);
-                List<string[]> breaks = new List<string[]>(numDocs);
+                IList<string> originals = new JCG.List<string>(numDocs);
+                IList<string[]> breaks = new JCG.List<string[]>(numDocs);
                 for (int i = 0; i < numDocs; i++)
                 {
                     string orig = "";
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
index b65f88e..56ccd4d 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
@@ -157,7 +157,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     ICharTermAttribute termAtt = ts.AddAttribute<ICharTermAttribute>();
                     IOffsetAttribute offsetAtt = ts.AddAttribute<IOffsetAttribute>();
                     ts.Reset();
-                    List<LookupHighlightFragment> fragments = new List<LookupHighlightFragment>();
+                    IList<LookupHighlightFragment> fragments = new JCG.List<LookupHighlightFragment>();
                     int upto = 0;
                     while (ts.IncrementToken())
                     {
@@ -219,7 +219,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             IList<Lookup.LookupResult> results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true);
             assertEquals(1, results.size());
-            assertEquals("a penny saved is a penny <b>ear</b>ned", ToString((List<LookupHighlightFragment>)results[0].HighlightKey));
+            assertEquals("a penny saved is a penny <b>ear</b>ned", ToString((IList<LookupHighlightFragment>)results[0].HighlightKey));
             assertEquals(10, results[0].Value);
             assertEquals(new BytesRef("foobaz"), results[0].Payload);
         }
@@ -611,8 +611,8 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 ISet<long> usedWeights = new JCG.HashSet<long>();
                 ISet<string> usedKeys = new JCG.HashSet<string>();
 
-                List<Input> inputs = new List<Input>();
-                List<Update> pendingUpdates = new List<Update>();
+                IList<Input> inputs = new JCG.List<Input>();
+                IList<Update> pendingUpdates = new JCG.List<Update>();
 
                 for (int iter = 0; iter < iters; iter++)
                 {
@@ -725,7 +725,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                         }
 
                         // Stupid slow but hopefully correct matching:
-                        List<Input> expected = new List<Input>();
+                        IList<Input> expected = new JCG.List<Input>();
                         for (int i = 0; i < visibleUpto; i++)
                         {
                             Input input = inputs[i];
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
index ab0c75b..1d36a21 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
@@ -145,7 +145,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             LineFileDocs lineFile = new LineFileDocs(Random);
             IDictionary<string, long> mapping = new JCG.Dictionary<string, long>();
-            List<Input> keys = new List<Input>();
+            IList<Input> keys = new JCG.List<Input>();
 
             int howMany = AtLeast(100); // this might bring up duplicates
             for (int i = 0; i < howMany; i++)
@@ -164,7 +164,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             bool doPayloads = Random.nextBoolean();
             if (doPayloads)
             {
-                List<Input> keysAndPayloads = new List<Input>();
+                IList<Input> keysAndPayloads = new JCG.List<Input>();
                 foreach (Input termFreq in keys)
                 {
                     keysAndPayloads.Add(new Input(termFreq.term, termFreq.v, new BytesRef(termFreq.v.ToString())));
@@ -778,7 +778,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             int numQueries = AtLeast(1000);
 
-            List<TermFreq2> slowCompletor = new List<TermFreq2>();
+            IList<TermFreq2> slowCompletor = new JCG.List<TermFreq2>();
             ISet<string> allPrefixes = new JCG.SortedSet<string>(StringComparer.Ordinal);
             ISet<string> seen = new JCG.HashSet<string>();
 
@@ -894,7 +894,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             {
                 // Don't just sort original list, to avoid VERBOSE
                 // altering the test:
-                List<TermFreq2> sorted = new List<TermFreq2>(slowCompletor);
+                IList<TermFreq2> sorted = new JCG.List<TermFreq2>(slowCompletor);
                 // LUCENENET NOTE: Must use TimSort because comparer is not expecting ties
                 CollectionUtil.TimSort(sorted);
                 foreach (TermFreq2 ent in sorted)
@@ -1491,7 +1491,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
         internal static IEnumerable<Input> Shuffle(params Input[] values)
         {
-            IList<Input> asList = new List<Input>(values.Length);
+            IList<Input> asList = new JCG.List<Input>(values.Length);
             foreach (Input value in values)
             {
                 asList.Add(value);
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
index 66a6660..52d3df8 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         [Test]
         public void TestRandomEdits()
         {
-            List<Input> keys = new List<Input>();
+            IList<Input> keys = new JCG.List<Input>();
             int numTerms = AtLeast(100);
             for (int i = 0; i < numTerms; i++)
             {
@@ -65,7 +65,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         [Test]
         public void TestNonLatinRandomEdits()
         {
-            List<Input> keys = new List<Input>();
+            IList<Input> keys = new JCG.List<Input>();
             int numTerms = AtLeast(100);
             for (int i = 0; i < numTerms; i++)
             {
@@ -723,7 +723,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             int numQueries = AtLeast(100);
 
-            List<TermFreqPayload2> slowCompletor = new List<TermFreqPayload2>();
+            IList<TermFreqPayload2> slowCompletor = new JCG.List<TermFreqPayload2>();
             JCG.SortedSet<string> allPrefixes = new JCG.SortedSet<string>(StringComparer.Ordinal);
             ISet<string> seen = new JCG.HashSet<string>();
 
@@ -817,7 +817,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             {
                 // Don't just sort original list, to avoid VERBOSE
                 // altering the test:
-                List<TermFreqPayload2> sorted = new List<TermFreqPayload2>(slowCompletor);
+                IList<TermFreqPayload2> sorted = new JCG.List<TermFreqPayload2>(slowCompletor);
                 // LUCENENET NOTE: Must use TimSort because comparer is not expecting ties
                 CollectionUtil.TimSort(sorted);
                 foreach (TermFreqPayload2 ent in sorted)
@@ -1118,7 +1118,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         public void TestRandom2()
         {
             int NUM = AtLeast(200);
-            List<Input> answers = new List<Input>();
+            IList<Input> answers = new JCG.List<Input>();
             ISet<string> seen = new JCG.HashSet<string>();
             for (int i = 0; i < NUM; i++)
             {
@@ -1165,7 +1165,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 {
                     Console.WriteLine("\nTEST: iter frag=" + frag);
                 }
-                List<Lookup.LookupResult> expected = SlowFuzzyMatch(prefixLen, maxEdits, transpositions, answers, frag);
+                IList<Lookup.LookupResult> expected = SlowFuzzyMatch(prefixLen, maxEdits, transpositions, answers, frag);
                 if (Verbose)
                 {
                     Console.WriteLine("  expected: " + expected.size());
@@ -1174,7 +1174,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                         Console.WriteLine("    " + c);
                     }
                 }
-                List<Lookup.LookupResult> actual = new List<Lookup.LookupResult>(suggest.DoLookup(frag, false, NUM));
+                JCG.List<Lookup.LookupResult> actual = new JCG.List<Lookup.LookupResult>(suggest.DoLookup(frag, false, NUM));
                 if (Verbose)
                 {
                     Console.WriteLine("  actual: " + actual.size());
@@ -1201,9 +1201,9 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             }
         }
 
-        private List<Lookup.LookupResult> SlowFuzzyMatch(int prefixLen, int maxEdits, bool allowTransposition, List<Input> answers, string frag)
+        private IList<Lookup.LookupResult> SlowFuzzyMatch(int prefixLen, int maxEdits, bool allowTransposition, IList<Input> answers, string frag)
         {
-            List<Lookup.LookupResult> results = new List<Lookup.LookupResult>();
+            IList<Lookup.LookupResult> results = new JCG.List<Lookup.LookupResult>();
             int fragLen = frag.Length;
             foreach (Input tf in answers)
             {
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
index 587c45c..7bab0b0 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
@@ -420,7 +420,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             sug.Build(new TestRandomInputEnumerator(docs));
 
             // Build inefficient but hopefully correct model:
-            List<IDictionary<string, int?>> gramCounts = new List<IDictionary<string, int?>>(grams);
+            IList<IDictionary<string, int?>> gramCounts = new JCG.List<IDictionary<string, int?>>(grams);
             for (int gram = 0; gram < grams; gram++)
             {
                 if (Verbose)
@@ -498,7 +498,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 }
 
                 // Expected:
-                List<Lookup.LookupResult> expected = new List<Lookup.LookupResult>();
+                JCG.List<Lookup.LookupResult> expected = new JCG.List<Lookup.LookupResult>();
                 double backoff = 1.0;
                 seen = new JCG.HashSet<string>();
 
@@ -581,7 +581,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     {
                         Console.WriteLine("      find terms w/ prefix=" + tokens[tokens.Length - 1]);
                     }
-                    List<Lookup.LookupResult> tmp = new List<Lookup.LookupResult>();
+                    JCG.List<Lookup.LookupResult> tmp = new JCG.List<Lookup.LookupResult>();
                     foreach (string term in terms)
                     {
                         if (term.StartsWith(tokens[tokens.Length - 1], StringComparison.Ordinal))
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
index 4dc975c..5f7932a 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
@@ -40,10 +40,10 @@ namespace Lucene.Net.Search.Suggest
         internal const string CONTEXT_FIELD_NAME = "c1";
 
         /** Returns Pair(list of invalid document terms, Map of document term -> document) */
-        private KeyValuePair<List<string>, IDictionary<string, Document>> GenerateIndexDocuments(int ndocs, bool requiresPayload, bool requiresContexts)
+        private KeyValuePair<IList<string>, IDictionary<string, Document>> GenerateIndexDocuments(int ndocs, bool requiresPayload, bool requiresContexts)
         {
             IDictionary<string, Document> docs = new JCG.Dictionary<string, Document>();
-            List<string> invalidDocTerms = new List<string>();
+            IList<string> invalidDocTerms = new JCG.List<string>();
             for (int i = 0; i < ndocs; i++)
             {
                 Document doc = new Document();
@@ -109,7 +109,7 @@ namespace Lucene.Net.Search.Suggest
 
                 docs.Put(term, doc);
             }
-            return new KeyValuePair<List<string>, IDictionary<string, Document>>(invalidDocTerms, docs);
+            return new KeyValuePair<IList<string>, IDictionary<string, Document>>(invalidDocTerms, docs);
         }
 
         [Test]
@@ -141,9 +141,9 @@ namespace Lucene.Net.Search.Suggest
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             iwc.SetMergePolicy(NewLogMergePolicy());
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
-            KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, false);
+            KeyValuePair<IList<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, false);
             IDictionary<string, Document> docs = res.Value;
-            List<String> invalidDocTerms = res.Key;
+            IList<String> invalidDocTerms = res.Key;
             foreach (Document doc in docs.Values)
             {
                 writer.AddDocument(doc);
@@ -184,9 +184,9 @@ namespace Lucene.Net.Search.Suggest
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             iwc.SetMergePolicy(NewLogMergePolicy());
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
-            KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), false, false);
+            KeyValuePair<IList<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), false, false);
             IDictionary<string, Document> docs = res.Value;
-            List<string> invalidDocTerms = res.Key;
+            IList<string> invalidDocTerms = res.Key;
             foreach (Document doc in docs.Values)
             {
                 writer.AddDocument(doc);
@@ -228,9 +228,9 @@ namespace Lucene.Net.Search.Suggest
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             iwc.SetMergePolicy(NewLogMergePolicy());
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
-            KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, true);
+            KeyValuePair<IList<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, true);
             IDictionary<string, Document> docs = res.Value;
-            List<string> invalidDocTerms = res.Key;
+            IList<string> invalidDocTerms = res.Key;
             foreach (Document doc in docs.Values)
             {
                 writer.AddDocument(doc);
@@ -278,11 +278,11 @@ namespace Lucene.Net.Search.Suggest
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             iwc.SetMergePolicy(NewLogMergePolicy());
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
-            KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), false, false);
+            KeyValuePair<IList<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), false, false);
             IDictionary<string, Document> docs = res.Value;
-            List<String> invalidDocTerms = res.Key;
+            IList<String> invalidDocTerms = res.Key;
             Random rand = Random;
-            List<string> termsToDel = new List<string>();
+            IList<string> termsToDel = new JCG.List<string>();
             foreach (Document doc in docs.Values)
             {
                 IIndexableField f2 = doc.GetField(FIELD_NAME);
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
index bb8fc9f..422db35 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
@@ -224,7 +224,7 @@ namespace Lucene.Net.Search.Suggest
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
             IDictionary<string, Document> docs = GenerateIndexDocuments(AtLeast(100));
             Random rand = Random;
-            List<string> termsToDel = new List<string>();
+            IList<string> termsToDel = new JCG.List<string>();
             foreach (Document doc in docs.Values)
             {
                 if (rand.nextBoolean() && termsToDel.size() < docs.size() - 1)
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
index aecc220..3549b50 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
@@ -1,11 +1,11 @@
-using Lucene.Net.Attributes;
-using Lucene.Net.Util;
+using Lucene.Net.Util;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest
 {
@@ -28,9 +28,9 @@ namespace Lucene.Net.Search.Suggest
 
     public class FileDictionaryTest : LuceneTestCase
     {
-        private KeyValuePair<List<string>, string> GenerateFileEntry(string fieldDelimiter, bool hasWeight, bool hasPayload)
+        private KeyValuePair<IList<string>, string> GenerateFileEntry(string fieldDelimiter, bool hasWeight, bool hasPayload)
         {
-            List<string> entryValues = new List<string>();
+            IList<string> entryValues = new JCG.List<string>();
             StringBuilder sb = new StringBuilder();
             string term = TestUtil.RandomSimpleString(Random, 1, 300);
             sb.Append(term);
@@ -52,12 +52,12 @@ namespace Lucene.Net.Search.Suggest
                 entryValues.Add(payload);
             }
             sb.append("\n");
-            return new KeyValuePair<List<string>, string>(entryValues, sb.ToString());
+            return new KeyValuePair<IList<string>, string>(entryValues, sb.ToString());
         }
 
-        private KeyValuePair<List<List<string>>, string> generateFileInput(int count, string fieldDelimiter, bool hasWeights, bool hasPayloads)
+        private KeyValuePair<IList<IList<string>>, string> generateFileInput(int count, string fieldDelimiter, bool hasWeights, bool hasPayloads)
         {
-            List<List<string>> entries = new List<List<string>>();
+            IList<IList<string>> entries = new JCG.List<IList<string>>();
             StringBuilder sb = new StringBuilder();
             bool hasPayload = hasPayloads;
             for (int i = 0; i < count; i++)
@@ -66,27 +66,27 @@ namespace Lucene.Net.Search.Suggest
                 {
                     hasPayload = (i == 0) ? true : Random.nextBoolean();
                 }
-                KeyValuePair<List<string>, string> entrySet = GenerateFileEntry(fieldDelimiter, (!hasPayloads && hasWeights) ? Random.nextBoolean() : hasWeights, hasPayload);
+                KeyValuePair<IList<string>, string> entrySet = GenerateFileEntry(fieldDelimiter, (!hasPayloads && hasWeights) ? Random.nextBoolean() : hasWeights, hasPayload);
                 entries.Add(entrySet.Key);
                 sb.Append(entrySet.Value);
             }
-            return new KeyValuePair<List<List<string>>, string>(entries, sb.ToString());
+            return new KeyValuePair<IList<IList<string>>, string>(entries, sb.ToString());
         }
 
         [Test]
         public void TestFileWithTerm()
         {
-            KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, false, false);
+            KeyValuePair<IList<IList<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, false, false);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader);
-            List<List<string>> entries = fileInput.Key;
+            IList<IList<string>> entries = fileInput.Key;
             IInputEnumerator inputIter = dictionary.GetEntryEnumerator();
             assertFalse(inputIter.HasPayloads);
             int count = 0;
             while (inputIter.MoveNext())
             {
                 assertTrue(entries.size() > count);
-                List<string> entry = entries[count];
+                IList<string> entry = entries[count];
                 assertTrue(entry.size() >= 1); // at least a term
                 assertEquals(entry[0], inputIter.Current.Utf8ToString());
                 assertEquals(1, inputIter.Weight);
@@ -99,17 +99,17 @@ namespace Lucene.Net.Search.Suggest
         [Test]
         public void TestFileWithWeight()
         {
-            KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, true, false);
+            KeyValuePair<IList<IList<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, true, false);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader);
-            List<List<String>> entries = fileInput.Key;
+            IList<IList<String>> entries = fileInput.Key;
             IInputEnumerator inputIter = dictionary.GetEntryEnumerator();
             assertFalse(inputIter.HasPayloads);
             int count = 0;
             while (inputIter.MoveNext())
             {
                 assertTrue(entries.size() > count);
-                List<String> entry = entries[count];
+                IList<String> entry = entries[count];
                 assertTrue(entry.size() >= 1); // at least a term
                 assertEquals(entry[0], inputIter.Current.Utf8ToString());
                 assertEquals((entry.size() == 2) ? long.Parse(entry[1], CultureInfo.InvariantCulture) : 1, inputIter.Weight);
@@ -122,17 +122,17 @@ namespace Lucene.Net.Search.Suggest
         [Test]
         public void TestFileWithWeightAndPayload()
         {
-            KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
+            KeyValuePair<IList<IList<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader);
-            List<List<string>> entries = fileInput.Key;
+            IList<IList<string>> entries = fileInput.Key;
             IInputEnumerator inputIter = dictionary.GetEntryEnumerator();
             assertTrue(inputIter.HasPayloads);
             int count = 0;
             while (inputIter.MoveNext())
             {
                 assertTrue(entries.size() > count);
-                List<string> entry = entries[count];
+                IList<string> entry = entries[count];
                 assertTrue(entry.size() >= 2); // at least term and weight
                 assertEquals(entry[0], inputIter.Current.Utf8ToString());
                 assertEquals(long.Parse(entry[1], CultureInfo.InvariantCulture), inputIter.Weight);
@@ -152,17 +152,17 @@ namespace Lucene.Net.Search.Suggest
         [Test]
         public void TestFileWithOneEntry()
         {
-            KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(1, FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
+            KeyValuePair<IList<IList<string>>, string> fileInput = generateFileInput(1, FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader);
-            List<List<string>> entries = fileInput.Key;
+            IList<IList<string>> entries = fileInput.Key;
             IInputEnumerator inputIter = dictionary.GetEntryEnumerator();
             assertTrue(inputIter.HasPayloads);
             int count = 0;
             while (inputIter.MoveNext())
             {
                 assertTrue(entries.size() > count);
-                List<string> entry = entries[count];
+                IList<string> entry = entries[count];
                 assertTrue(entry.size() >= 2); // at least term and weight
                 assertEquals(entry[0], inputIter.Current.Utf8ToString());
                 assertEquals(long.Parse(entry[1], CultureInfo.InvariantCulture), inputIter.Weight);
@@ -182,17 +182,17 @@ namespace Lucene.Net.Search.Suggest
         [Test]
         public void TestFileWithDifferentDelimiter()
         {
-            KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(AtLeast(100), " , ", true, true);
+            KeyValuePair<IList<IList<string>>, string> fileInput = generateFileInput(AtLeast(100), " , ", true, true);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader, " , ");
-            List<List<string>> entries = fileInput.Key;
+            IList<IList<string>> entries = fileInput.Key;
             IInputEnumerator inputIter = dictionary.GetEntryEnumerator();
             assertTrue(inputIter.HasPayloads);
             int count = 0;
             while (inputIter.MoveNext())
             {
                 assertTrue(entries.size() > count);
-                List<string> entry = entries[count];
+                IList<string> entry = entries[count];
                 assertTrue(entry.size() >= 2); // at least term and weight
                 assertEquals(entry[0], inputIter.Current.Utf8ToString());
                 assertEquals(long.Parse(entry[1], CultureInfo.InvariantCulture), inputIter.Weight);
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
index 897b91a..c9e56d6 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
@@ -8,6 +8,7 @@ using System.Globalization;
 using System.Text;
 using System.Text.RegularExpressions;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Search.Suggest.Fst
 {
@@ -182,7 +183,7 @@ namespace Lucene.Net.Search.Suggest.Fst
             FSTCompletionLookup lookup = new FSTCompletionLookup(10, true);
 
             Random r = Random;
-            List<Input> keys = new List<Input>();
+            IList<Input> keys = new JCG.List<Input>();
             for (int i = 0; i < 5000; i++)
             {
                 keys.Add(new Input(TestUtil.RandomSimpleString(r), -1));
@@ -234,7 +235,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         [Test]
         public void TestRandom()
         {
-            List<Input> freqs = new List<Input>();
+            JCG.List<Input> freqs = new JCG.List<Input>();
             Random rnd = Random;
             for (int i = 0; i < 2500 + rnd.nextInt(2500); i++)
             {
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
index 4ea2d69..c7d9f81 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
@@ -178,7 +178,7 @@ namespace Lucene.Net.Search.Suggest.Fst
                 IList<Lookup.LookupResult> r = suggester.DoLookup(TestUtil.StringToCharSequence(prefix, random).ToString(), false, topN);
 
                 // 2. go thru whole treemap (slowCompletor) and check its actually the best suggestion
-                List<Lookup.LookupResult> matches = new List<Lookup.LookupResult>();
+                JCG.List<Lookup.LookupResult> matches = new JCG.List<Lookup.LookupResult>();
 
                 // TODO: could be faster... but its slowCompletor for a reason
                 foreach (KeyValuePair<string, long> e in slowCompletor)
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
index 2441913..17a79eb 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
@@ -91,7 +91,7 @@ namespace Lucene.Net.Search.Suggest
          */
         public static IList<Input> ReadTop50KWiki()
         {
-            List<Input> input = new List<Input>();
+            IList<Input> input = new JCG.List<Input>();
 
             var resource = typeof(LookupBenchmarkTest).FindAndGetManifestResourceStream("Top50KWiki.utf8");
             if (Debugging.AssertsEnabled) Debugging.Assert(resource != null, "Resource missing: Top50KWiki.utf8");
@@ -241,7 +241,7 @@ namespace Lucene.Net.Search.Suggest
             {
                 Lookup lookup = BuildLookup(cls, dictionaryInput);
 
-                List<string> input = new List<string>(benchmarkInput.size());
+                IList<string> input = new JCG.List<string>(benchmarkInput.size());
                 foreach (Input tf in benchmarkInput)
                 {
                     string s = tf.term.Utf8ToString();
diff --git a/src/Lucene.Net.Tests.TestFramework/Analysis/TrivialLookaheadFilter.cs b/src/Lucene.Net.Tests.TestFramework/Analysis/TrivialLookaheadFilter.cs
index 2013458..3620776 100644
--- a/src/Lucene.Net.Tests.TestFramework/Analysis/TrivialLookaheadFilter.cs
+++ b/src/Lucene.Net.Tests.TestFramework/Analysis/TrivialLookaheadFilter.cs
@@ -2,7 +2,7 @@
 
 using Lucene.Net.Analysis.TokenAttributes;
 using System;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis
 {
@@ -29,10 +29,10 @@ namespace Lucene.Net.Analysis
     public sealed class TrivialLookaheadFilter : LookaheadTokenFilter<TestPosition>
     {
         private ICharTermAttribute termAtt;
-  private IPositionIncrementAttribute posIncAtt;
-  private IOffsetAttribute offsetAtt;
+        private IPositionIncrementAttribute posIncAtt;
+        private IOffsetAttribute offsetAtt;
 
-  private int insertUpto;
+        private int insertUpto;
 
         public TrivialLookaheadFilter(TokenStream input)
             : base(input)
@@ -42,67 +42,75 @@ namespace Lucene.Net.Analysis
             offsetAtt = AddAttribute<IOffsetAttribute>();
         }
 
-  protected override TestPosition NewPosition()
+        protected override TestPosition NewPosition()
         {
             return new TestPosition();
         }
 
-  public override bool IncrementToken() 
+        public override bool IncrementToken()
         {
-    // At the outset, getMaxPos is -1. So we'll peek. When we reach the end of the sentence and go to the
-    // first token of the next sentence, maxPos will be the prev sentence's end token, and we'll go again.
-    if (m_positions.MaxPos < m_outputPos) {
-      peekSentence();
-    }
+            // At the outset, getMaxPos is -1. So we'll peek. When we reach the end of the sentence and go to the
+            // first token of the next sentence, maxPos will be the prev sentence's end token, and we'll go again.
+            if (m_positions.MaxPos < m_outputPos)
+            {
+                PeekSentence();
+            }
 
-    return NextToken();
-}
+            return NextToken();
+        }
 
-  public override void Reset() 
-{
-    base.Reset();
-    insertUpto = -1;
-}
+        public override void Reset()
+        {
+            base.Reset();
+            insertUpto = -1;
+        }
 
-  protected override void AfterPosition() 
-{
-    if (insertUpto < m_outputPos) {
-        InsertToken();
-        // replace term with 'improved' term.
-        ClearAttributes();
-        termAtt.SetEmpty();
-        posIncAtt.PositionIncrement=(0);
-        termAtt.Append(m_positions.Get(m_outputPos).Fact);
-        offsetAtt.SetOffset(m_positions.Get(m_outputPos).StartOffset,
-                            m_positions.Get(m_outputPos + 1).EndOffset);
-        insertUpto = m_outputPos;
-    }
-}
+        protected override void AfterPosition()
+        {
+            if (insertUpto < m_outputPos)
+            {
+                InsertToken();
+                // replace term with 'improved' term.
+                ClearAttributes();
+                termAtt.SetEmpty();
+                posIncAtt.PositionIncrement = (0);
+                termAtt.Append(m_positions.Get(m_outputPos).Fact);
+                offsetAtt.SetOffset(m_positions.Get(m_outputPos).StartOffset,
+                                    m_positions.Get(m_outputPos + 1).EndOffset);
+                insertUpto = m_outputPos;
+            }
+        }
 
-private void peekSentence() 
-{
-    var facts = new List<string>();
-    bool haveSentence = false;
-    do {
-      if (PeekToken()) {
+        private void PeekSentence()
+        {
+            var facts = new JCG.List<string>();
+            bool haveSentence = false;
+            do
+            {
+                if (PeekToken())
+                {
 
-        String term = new String(termAtt.Buffer, 0, termAtt.Length);
-facts.Add(term + "-huh?");
-        if (".".equals(term)) {
-          haveSentence = true;
-        }
+                    String term = new String(termAtt.Buffer, 0, termAtt.Length);
+                    facts.Add(term + "-huh?");
+                    if (".".equals(term))
+                    {
+                        haveSentence = true;
+                    }
 
-      } else {
-        haveSentence = true;
-      }
+                }
+                else
+                {
+                    haveSentence = true;
+                }
 
-    } while (!haveSentence);
+            } while (!haveSentence);
 
-    // attach the (now disambiguated) analyzed tokens to the positions.
-    for (int x = 0; x<facts.size(); x++) {
+            // attach the (now disambiguated) analyzed tokens to the positions.
+            for (int x = 0; x < facts.size(); x++)
+            {
                 // sentenceTokens is just relative to sentence, positions is absolute.
-                m_positions.Get(m_outputPos + x).Fact=(facts[x]);
-    }
-  }
+                m_positions.Get(m_outputPos + x).Fact = (facts[x]);
+            }
+        }
     }
 }
diff --git a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
index 2198e92..7637b50 100644
--- a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
@@ -6,6 +6,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -139,7 +140,7 @@ namespace Lucene.Net.Analysis
 
                 string[] parts = sb.ToString().Split(' ').TrimEnd();
 
-                tokens = new List<Token>();
+                tokens = new JCG.List<Token>();
                 int pos = 0;
                 int maxPos = -1;
                 int offset = 0;
@@ -500,7 +501,7 @@ namespace Lucene.Net.Analysis
 
         private Automaton Join(params string[] strings)
         {
-            IList<Automaton> @as = new List<Automaton>();
+            IList<Automaton> @as = new JCG.List<Automaton>();
             foreach (string s in strings)
             {
                 @as.Add(BasicAutomata.MakeString(s));
diff --git a/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs b/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs
index 1bfcd77..78c6ab2 100644
--- a/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs
+++ b/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs
@@ -1,6 +1,7 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Analysis.TokenAttributes;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Analysis
 {
@@ -80,7 +81,7 @@ namespace Lucene.Net.Analysis
 
         private void PeekSentence()
         {
-            IList<string> facts = new List<string>();
+            IList<string> facts = new JCG.List<string>();
             bool haveSentence = false;
             do
             {
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
index ab710ce..387e6ab 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
@@ -356,7 +356,7 @@ namespace Lucene.Net.Codecs.Lucene3x
 
             int tc = 0;
 
-            var fieldTerms = new List<Term>();
+            var fieldTerms = new JCG.List<Term>();
 
             for (int f = 0; f < numField; f++)
             {
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
index b63709e..2b5f104 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
@@ -1,10 +1,11 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Globalization;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Codecs.Lucene3x
@@ -167,7 +168,7 @@ namespace Lucene.Net.Codecs.Lucene3x
 
         private static IList<Term> Sample(Random random, IndexReader reader, int size)
         {
-            IList<Term> sample = new List<Term>();
+            IList<Term> sample = new JCG.List<Term>();
             Fields fields = MultiFields.GetFields(reader);
             foreach (string field in fields)
             {
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
index d8d522b..294f608 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
@@ -7,6 +7,7 @@ using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Lucene40
 {
@@ -141,7 +142,7 @@ namespace Lucene.Net.Codecs.Lucene40
 
         internal virtual string FieldValue(int maxTF)
         {
-            IList<string> shuffled = new List<string>();
+            IList<string> shuffled = new JCG.List<string>();
             StringBuilder sb = new StringBuilder();
             int i = Random.Next(terms.Length);
             while (i < terms.Length)
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
index e358057..98ee4cf 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
@@ -261,7 +261,7 @@ namespace Lucene.Net.Codecs.Lucene41
                 numPasses++;
             }
 
-            List<BytesRef> shuffledTests = new List<BytesRef>(tests);
+            IList<BytesRef> shuffledTests = new JCG.List<BytesRef>(tests);
             shuffledTests.Shuffle(Random);
 
             foreach (BytesRef b in shuffledTests)
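
The Shuffle(Random) call above is why the declared type widens from List<BytesRef> to IList<BytesRef>: the shuffle is an extension method over IList<T>, presumably from J2N.Collections.Generic.Extensions, judging from the usings added elsewhere in this diff. A hedged, self-contained sketch of the same pattern (the seed and item values are made up, and the extension's exact home is an assumption):

    using System;
    using System.Collections.Generic;
    using J2N.Collections.Generic.Extensions; // assumed home of the Shuffle extension
    using JCG = J2N.Collections.Generic;

    public static class ShuffleDemo
    {
        public static void Main()
        {
            // Declaring the variable as IList<T> keeps the Shuffle call site
            // unchanged regardless of the concrete list type behind it.
            IList<string> items = new JCG.List<string> { "a", "b", "c", "d" };
            items.Shuffle(new Random(42)); // in-place shuffle, deterministic with a seeded Random
            Console.WriteLine(string.Join(", ", items));
        }
    }
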
diff --git a/src/Lucene.Net.Tests/Index/Test2BTerms.cs b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
index f15c6cb..ceefb7b 100644
--- a/src/Lucene.Net.Tests/Index/Test2BTerms.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
@@ -237,7 +237,7 @@ namespace Lucene.Net.Index
                 savedTerms = FindTerms(r);
             }
             int numSavedTerms = savedTerms.Count;
-            IList<BytesRef> bigOrdTerms = new List<BytesRef>(savedTerms.GetView(numSavedTerms - 10, 10)); // LUCENENET: Converted end index to length
+            IList<BytesRef> bigOrdTerms = new JCG.List<BytesRef>(savedTerms.GetView(numSavedTerms - 10, 10)); // LUCENENET: Converted end index to length
             Console.WriteLine("TEST: test big ord terms...");
             TestSavedTerms(r, bigOrdTerms);
             Console.WriteLine("TEST: test all saved terms...");
@@ -257,7 +257,7 @@ namespace Lucene.Net.Index
         {
             Console.WriteLine("TEST: findTerms");
             TermsEnum termsEnum = MultiFields.GetTerms(r, "field").GetEnumerator();
-            IList<BytesRef> savedTerms = new List<BytesRef>();
+            IList<BytesRef> savedTerms = new JCG.List<BytesRef>();
             int nextSave = TestUtil.NextInt32(Random, 500000, 1000000);
             BytesRef term;
             while (termsEnum.MoveNext())
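
The "Converted end index to length" note above reflects the recurring pattern behind the GetView calls in this port: Java's subList(from, to) takes an exclusive end index, while GetView(index, count) takes a start position and a number of elements. A small hypothetical illustration (the values are made up):

    using System;
    using J2N.Collections.Generic.Extensions; // in case GetView is an extension method (assumption)
    using JCG = J2N.Collections.Generic;

    public static class GetViewDemo
    {
        public static void Main()
        {
            var saved = new JCG.List<int> { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
            int n = saved.Count;

            // Java:  saved.subList(n - 10, n)   -> last 10 elements (exclusive end index)
            // Here:  saved.GetView(n - 10, 10)  -> same elements, expressed as (start index, length)
            var lastTen = new JCG.List<int>(saved.GetView(n - 10, 10));
            Console.WriteLine(lastTen); // expected to print the contents, e.g. [2, 3, ..., 11]
        }
    }
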
diff --git a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
index 77a53a0..766750b 100644
--- a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
+++ b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
@@ -1,5 +1,4 @@
 using J2N.Threading;
-using Lucene.Net.Attributes;
 using Lucene.Net.Codecs;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
@@ -8,11 +7,11 @@ using Lucene.Net.Support.Threading;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
-using System.IO;
 using System.Linq;
 using System.Threading;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -637,7 +636,7 @@ namespace Lucene.Net.Index
             internal Directory dir, dir2;
             internal const int NUM_INIT_DOCS = 17;
             internal IndexWriter writer2;
-            internal readonly IList<Exception> failures = new List<Exception>();
+            internal readonly IList<Exception> failures = new JCG.List<Exception>();
             internal volatile bool didClose;
             internal readonly IndexReader[] readers;
             internal readonly int NUM_COPY;
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
index c0dd829..2e2ad73 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
@@ -10,6 +10,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -204,7 +205,7 @@ namespace Lucene.Net.Index
             base.BeforeClass();
 
             Assert.IsFalse(OldFormatImpersonationIsActive, "test infra is broken!");
-            List<string> names = new List<string>(oldNames.Length + oldSingleSegmentNames.Length);
+            JCG.List<string> names = new JCG.List<string>(oldNames.Length + oldSingleSegmentNames.Length);
             names.AddRange(oldNames);
             names.AddRange(oldSingleSegmentNames);
             oldIndexDirs = new Dictionary<string, Directory>();
@@ -929,7 +930,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestUpgradeOldIndex()
         {
-            List<string> names = new List<string>(oldNames.Length + oldSingleSegmentNames.Length);
+            JCG.List<string> names = new JCG.List<string>(oldNames.Length + oldSingleSegmentNames.Length);
             names.AddRange(oldNames);
             names.AddRange(oldSingleSegmentNames);
             foreach (string name in names)
@@ -963,7 +964,7 @@ namespace Lucene.Net.Index
 
                 string path = dir.FullName;
 
-                IList<string> args = new List<string>();
+                IList<string> args = new JCG.List<string>();
                 if (Random.NextBoolean())
                 {
                     args.Add("-verbose");
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
index ec17dff..a18d5ac 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
@@ -8,6 +8,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -136,7 +137,7 @@ namespace Lucene.Net.Index
             base.BeforeClass();
 
             assertFalse("test infra is broken!", OldFormatImpersonationIsActive);
-            List<string> names = new List<string>(oldNames.Length + oldSingleSegmentNames.Length);
+            JCG.List<string> names = new JCG.List<string>(oldNames.Length + oldSingleSegmentNames.Length);
             names.AddRange(oldNames);
             names.AddRange(oldSingleSegmentNames);
             oldIndexDirs = new Dictionary<string, Directory>();
@@ -890,7 +891,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestUpgradeOldIndex()
         {
-            List<string> names = new List<string>(oldNames.Length + oldSingleSegmentNames.Length);
+            JCG.List<string> names = new JCG.List<string>(oldNames.Length + oldSingleSegmentNames.Length);
             names.AddRange(oldNames);
             names.AddRange(oldSingleSegmentNames);
             foreach (string name in names)
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
index ec324b7..19738a6 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
@@ -10,6 +10,7 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.Text;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -56,7 +57,7 @@ namespace Lucene.Net.Index
         [Slow]
         public virtual void Test()
         {
-            IList<string> postingsList = new List<string>();
+            IList<string> postingsList = new JCG.List<string>();
             int numTerms = AtLeast(300);
             int maxTermsPerDoc = TestUtil.NextInt32(Random, 10, 20);
             bool isSimpleText = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"), StringComparison.Ordinal);
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
index 0965fb3..3536a0d 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void Test()
         {
-            IList<string> postingsList = new List<string>();
+            IList<string> postingsList = new JCG.List<string>();
             int numTerms = AtLeast(300);
             int maxTermsPerDoc = TestUtil.NextInt32(Random, 10, 20);
 
diff --git a/src/Lucene.Net.Tests/Index/TestCheckIndex.cs b/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
index 6bc5ae2..c9e990a 100644
--- a/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
+++ b/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
@@ -1,10 +1,11 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Support.IO;
 using NUnit.Framework;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -102,7 +103,7 @@ namespace Lucene.Net.Index
             Assert.AreEqual(18, seg.TermVectorStatus.TotVectors);
 
             Assert.IsTrue(seg.Diagnostics.Count > 0);
-            IList<string> onlySegments = new List<string>();
+            IList<string> onlySegments = new JCG.List<string>();
             onlySegments.Add("_0");
 
             Assert.IsTrue(checker.DoCheckIndex(onlySegments).Clean == true);
diff --git a/src/Lucene.Net.Tests/Index/TestDoc.cs b/src/Lucene.Net.Tests/Index/TestDoc.cs
index 3004ed9..4046194 100644
--- a/src/Lucene.Net.Tests/Index/TestDoc.cs
+++ b/src/Lucene.Net.Tests/Index/TestDoc.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
@@ -216,7 +216,7 @@ namespace Lucene.Net.Index
             TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.Info.Dir);
             SegmentInfo si = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null);
 
-            SegmentMerger merger = new SegmentMerger(new List<AtomicReader> { r1, r2 }, si, (InfoStream)InfoStream.Default, trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), context, true);
+            SegmentMerger merger = new SegmentMerger(new JCG.List<AtomicReader> { r1, r2 }, si, (InfoStream)InfoStream.Default, trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), context, true);
 
             MergeState mergeState = merger.Merge();
             r1.Dispose();
diff --git a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
index 59720ba..22b3371 100644
--- a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
@@ -1,4 +1,4 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Search;
@@ -296,7 +296,7 @@ namespace Lucene.Net.Index
                 for (int id = 0; id < NUM_DOCS; id++)
                 {
                     int[] docOrds = idToOrds[id];
-                    IList<int?> newOrds = new List<int?>();
+                    IList<int?> newOrds = new JCG.List<int?>();
                     foreach (int ord in idToOrds[id])
                     {
                         if (StringHelper.StartsWith(termsArray[ord], prefixRef))
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
index 262b64c..b160376 100644
--- a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
@@ -51,9 +51,9 @@ namespace Lucene.Net.Index
             Directory dir = NewDirectory();
             IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
 
-            IList<long?> numbers = new List<long?>();
-            IList<BytesRef> binary = new List<BytesRef>();
-            IList<BytesRef> sorted = new List<BytesRef>();
+            IList<long?> numbers = new JCG.List<long?>();
+            IList<BytesRef> binary = new JCG.List<BytesRef>();
+            IList<BytesRef> sorted = new JCG.List<BytesRef>();
             int numDocs = AtLeast(100);
             for (int i = 0; i < numDocs; i++)
             {
@@ -78,7 +78,7 @@ namespace Lucene.Net.Index
             AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;
 
             int numThreads = TestUtil.NextInt32(Random, 2, 5);
-            IList<ThreadJob> threads = new List<ThreadJob>();
+            IList<ThreadJob> threads = new JCG.List<ThreadJob>();
             CountdownEvent startingGun = new CountdownEvent(1);
             for (int t = 0; t < numThreads; t++)
             {
@@ -200,7 +200,7 @@ namespace Lucene.Net.Index
                 Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS + " allowDups=" + allowDups);
             }
             int numDocs = 0;
-            IList<BytesRef> docValues = new List<BytesRef>();
+            IList<BytesRef> docValues = new JCG.List<BytesRef>();
 
             // TODO: deletions
             while (numDocs < NUM_DOCS)
diff --git a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
index e343094..b854216 100644
--- a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
@@ -3,9 +3,9 @@ using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System;
-using System.Collections.Generic;
 using System.Text;
 using Assert = Lucene.Net.TestFramework.Assert;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -26,13 +26,13 @@ namespace Lucene.Net.Index
      * limitations under the License.
      */
 
-    using IBits = Lucene.Net.Util.IBits;
     using BytesRef = Lucene.Net.Util.BytesRef;
     using Directory = Lucene.Net.Store.Directory;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using Document = Documents.Document;
     using Field = Field;
     using FieldType = FieldType;
+    using IBits = Lucene.Net.Util.IBits;
     using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
     using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
     using TestUtil = Lucene.Net.Util.TestUtil;
@@ -135,7 +135,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < numDocs; i++)
             {
                 Document doc = new Document();
-                List<int?> positions = new List<int?>();
+                JCG.List<int?> positions = new JCG.List<int?>();
                 StringBuilder builder = new StringBuilder();
                 int num = AtLeast(131);
                 for (int j = 0; j < num; j++)
diff --git a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
index 23665f2..9029af1 100644
--- a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
@@ -6,6 +6,7 @@ using Lucene.Net.Store;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -390,8 +391,8 @@ namespace Lucene.Net.Index
 
             public override void OnDelete(DocumentsWriterFlushControl control, ThreadState state)
             {
-                List<ThreadState> pending = new List<ThreadState>();
-                List<ThreadState> notPending = new List<ThreadState>();
+                IList<ThreadState> pending = new JCG.List<ThreadState>();
+                IList<ThreadState> notPending = new JCG.List<ThreadState>();
                 FindPending(control, pending, notPending);
                 bool flushCurrent = state.IsFlushPending;
                 ThreadState toFlush;
@@ -430,8 +431,8 @@ namespace Lucene.Net.Index
 
             public override void OnInsert(DocumentsWriterFlushControl control, ThreadState state)
             {
-                List<ThreadState> pending = new List<ThreadState>();
-                List<ThreadState> notPending = new List<ThreadState>();
+                IList<ThreadState> pending = new JCG.List<ThreadState>();
+                IList<ThreadState> notPending = new JCG.List<ThreadState>();
                 FindPending(control, pending, notPending);
                 bool flushCurrent = state.IsFlushPending;
                 long activeBytes = control.ActiveBytes;
@@ -480,7 +481,7 @@ namespace Lucene.Net.Index
             }
         }
 
-        internal static void FindPending(DocumentsWriterFlushControl flushControl, List<ThreadState> pending, List<ThreadState> notPending)
+        internal static void FindPending(DocumentsWriterFlushControl flushControl, IList<ThreadState> pending, IList<ThreadState> notPending)
         {
             IEnumerator<ThreadState> allActiveThreads = flushControl.AllActiveThreadStates();
             while (allActiveThreads.MoveNext())
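
The FindPending signature change above is the general shape of many of these hunks: helpers accept IList<T> rather than a concrete List<T>, so callers can hand in either list implementation without further churn. A trivial, hypothetical sketch of the idea (names and values are made up):

    using System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class PendingDemo
    {
        // Accepting IList<T> instead of a concrete List<T> means the caller
        // decides which implementation backs the output lists.
        internal static void Split(IEnumerable<int> source, IList<int> even, IList<int> odd)
        {
            foreach (int value in source)
            {
                if (value % 2 == 0) even.Add(value);
                else odd.Add(value);
            }
        }

        internal static void Demo()
        {
            IList<int> even = new JCG.List<int>();
            IList<int> odd = new List<int>(); // the BCL list satisfies the same contract
            Split(new[] { 1, 2, 3, 4, 5 }, even, odd);
        }
    }
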
diff --git a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
index d717db4..2933770 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
@@ -3,6 +3,7 @@ using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Index
@@ -46,7 +47,7 @@ namespace Lucene.Net.Index
                 bool throwOnClose = !Rarely();
                 AtomicReader wrap = SlowCompositeReaderWrapper.Wrap(open);
                 FilterAtomicReader reader = new FilterAtomicReaderAnonymousClass(this, wrap, throwOnClose);
-                IList<IndexReader.IReaderClosedListener> listeners = new List<IndexReader.IReaderClosedListener>();
+                IList<IndexReader.IReaderClosedListener> listeners = new JCG.List<IndexReader.IReaderClosedListener>();
                 int listenerCount = Random.Next(20);
                 AtomicInt32 count = new AtomicInt32();
                 bool faultySet = false;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
index ed87d16..449186d 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
@@ -1046,7 +1046,7 @@ namespace Lucene.Net.Index
                 this.outerInstance = outerInstance;
                 termAtt = AddAttribute<ICharTermAttribute>();
                 posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
-                terms = new List<string> { "a", "b", "c" }.GetEnumerator();
+                terms = new JCG.List<string> { "a", "b", "c" }.GetEnumerator();
                 first = true;
             }
 
@@ -1707,7 +1707,7 @@ namespace Lucene.Net.Index
                     r = DirectoryReader.Open(dir);
                 }
 
-                IList<string> files = new List<string>(dir.ListAll());
+                IList<string> files = new JCG.List<string>(dir.ListAll());
 
                 // RAMDir won't have a write.lock, but fs dirs will:
                 files.Remove("write.lock");
@@ -1867,7 +1867,7 @@ namespace Lucene.Net.Index
             int computedExtraFileCount = 0;
             foreach (string file in dir.ListAll())
             {
-                if (file.LastIndexOf('.') < 0 || !new List<string> { "fdx", "fdt", "tvx", "tvd", "tvf" }.Contains(file.Substring(file.LastIndexOf('.') + 1)))
+                if (file.LastIndexOf('.') < 0 || !new JCG.List<string> { "fdx", "fdt", "tvx", "tvd", "tvf" }.Contains(file.Substring(file.LastIndexOf('.') + 1)))
                 // don't count stored fields and term vectors in
                 {
                     ++computedExtraFileCount;
@@ -2247,7 +2247,7 @@ namespace Lucene.Net.Index
             Directory dir = NewDirectory();
             IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             docs.Add(new Document());
             w.UpdateDocuments(new Term("foo", "bar"), docs);
             w.Dispose();
@@ -2608,7 +2608,7 @@ namespace Lucene.Net.Index
             ISet<string> liveIds = new JCG.HashSet<string>();
             for (int i = 0; i < iters; i++)
             {
-                IList<IEnumerable<IIndexableField>> docs = new List<IEnumerable<IIndexableField>>();
+                IList<IEnumerable<IIndexableField>> docs = new JCG.List<IEnumerable<IIndexableField>>();
                 FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
                 FieldType idFt = new FieldType(TextField.TYPE_STORED);
 
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
index 97cc1b1..dcbd2e3 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
@@ -14,6 +14,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -1113,7 +1114,7 @@ namespace Lucene.Net.Index
 #endif
                 Random, dir);
             int NUM_DOCS = AtLeast(1000);
-            IList<int?> ids = new List<int?>(NUM_DOCS);
+            IList<int?> ids = new JCG.List<int?>(NUM_DOCS);
             for (int id = 0; id < NUM_DOCS; id++)
             {
                 ids.Add(id);
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
index 8172648..78712bd 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
@@ -1,7 +1,6 @@
 using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Analysis;
-using Lucene.Net.Attributes;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
@@ -17,6 +16,7 @@ using System.IO;
 using System.Threading;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -1645,7 +1645,7 @@ namespace Lucene.Net.Index
                 w.AddDocument(doc);
             }
 
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             for (int docCount = 0; docCount < 7; docCount++)
             {
                 Document doc = new Document();
@@ -1716,7 +1716,7 @@ namespace Lucene.Net.Index
             }
 
             // Use addDocs (no exception) to get docs in the index:
-            IList<Document> docs = new List<Document>();
+            IList<Document> docs = new JCG.List<Document>();
             int numDocs2 = Random.Next(25);
             for (int docCount = 0; docCount < numDocs2; docCount++)
             {
@@ -1895,7 +1895,7 @@ namespace Lucene.Net.Index
             {
                 doc = new Document();
                 // try to boost with norms omitted
-                IList<IIndexableField> list = new List<IIndexableField>();
+                IList<IIndexableField> list = new JCG.List<IIndexableField>();
                 list.Add(new IndexableFieldAnonymousClass());
                 iw.AddDocument(list);
                 Assert.Fail("didn't get any exception, boost silently discarded");
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
index 376f9e8..8e72f97 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
@@ -8,6 +8,7 @@ using System;
 using System.Collections.Generic;
 using System.Runtime.ExceptionServices;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -411,7 +412,7 @@ namespace Lucene.Net.Index
                     ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;
 
                     IndexWriter finalWriter = writer;
-                    List<Exception> failure = new List<Exception>();
+                    IList<Exception> failure = new JCG.List<Exception>();
                     ThreadJob t1 = new ThreadAnonymousClass(this, doc, finalWriter, failure);
 
                     if (failure.Count > 0)
@@ -443,9 +444,9 @@ namespace Lucene.Net.Index
 
             private Document doc;
             private IndexWriter finalWriter;
-            private List<Exception> failure;
+            private IList<Exception> failure;
 
-            public ThreadAnonymousClass(TestIndexWriterMerging outerInstance, Document doc, IndexWriter finalWriter, List<Exception> failure)
+            public ThreadAnonymousClass(TestIndexWriterMerging outerInstance, Document doc, IndexWriter finalWriter, IList<Exception> failure)
             {
                 this.outerInstance = outerInstance;
                 this.doc = doc;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
index 74412ba..60d5268 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
@@ -120,7 +120,7 @@
 //        /// fork ourselves in a new jvm. sets -Dtests.crashmode=true </summary>
 //        public virtual void ForkTest()
 //        {
-//            IList<string> cmd = new List<string>();
+//            IList<string> cmd = new JCG.List<string>();
 //            cmd.Add(System.getProperty("java.home") + System.getProperty("file.separator") + "bin" + System.getProperty("file.separator") + "java");
 //            cmd.Add("-Xmx512m");
 //            cmd.Add("-Dtests.crashmode=true");
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
index 590fb85..463a918 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
@@ -11,6 +11,7 @@ using RandomizedTesting.Generators;
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -471,7 +472,7 @@ namespace Lucene.Net.Index
             internal int numDirs;
             internal ThreadJob[] threads;
             internal IndexWriter mainWriter;
-            internal readonly IList<Exception> failures = new List<Exception>();
+            internal readonly IList<Exception> failures = new JCG.List<Exception>();
             internal IndexReader[] readers;
             internal bool didClose = false;
             internal AtomicInt32 count = new AtomicInt32(0);
diff --git a/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs b/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs
index a806000..6e327e6 100644
--- a/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs
+++ b/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs
@@ -1,6 +1,7 @@
 using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Index
@@ -77,7 +78,7 @@ namespace Lucene.Net.Index
             Int32BlockPool pool = new Int32BlockPool(new ByteTrackingAllocator(bytesUsed));
             for (int j = 0; j < 2; j++)
             {
-                IList<StartEndAndValues> holders = new List<StartEndAndValues>();
+                IList<StartEndAndValues> holders = new JCG.List<StartEndAndValues>();
                 int num = AtLeast(4);
                 for (int i = 0; i < num; i++)
                 {
diff --git a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
index d9bd693..07ea845 100644
--- a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
+++ b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
@@ -1,10 +1,11 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Index
@@ -45,7 +46,7 @@ namespace Lucene.Net.Index
         private Directory dir;
         private IndexReader reader;
         /* expected maxTermFrequency values for our documents */
-        private readonly List<int?> expected = new List<int?>();
+        private readonly IList<int?> expected = new JCG.List<int?>();
 
         [SetUp]
         public override void SetUp()
@@ -93,7 +94,7 @@ namespace Lucene.Net.Index
         /// </summary>
         private string AddValue()
         {
-            IList<string> terms = new List<string>();
+            IList<string> terms = new JCG.List<string>();
             int maxCeiling = TestUtil.NextInt32(Random, 0, 255);
             int max = 0;
             for (char ch = 'a'; ch <= 'z'; ch++)
diff --git a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
index f7c2377..f4e639f 100644
--- a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
@@ -3,6 +3,7 @@ using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Index
@@ -279,7 +280,7 @@ namespace Lucene.Net.Index
                 for (int i = 0; i < numDocs; i++)
                 {
                     single.SetDocument(i);
-                    List<long> expectedList = new List<long>();
+                    IList<long> expectedList = new JCG.List<long>();
                     long ord;
                     while ((ord = single.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                     {
@@ -356,7 +357,7 @@ namespace Lucene.Net.Index
                 for (int i = 0; i < numDocs; i++)
                 {
                     single.SetDocument(i);
-                    List<long?> expectedList = new List<long?>();
+                    IList<long?> expectedList = new JCG.List<long?>();
                     long ord;
                     while ((ord = single.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                     {
diff --git a/src/Lucene.Net.Tests/Index/TestMultiFields.cs b/src/Lucene.Net.Tests/Index/TestMultiFields.cs
index 380d52c..01ea3e0 100644
--- a/src/Lucene.Net.Tests/Index/TestMultiFields.cs
+++ b/src/Lucene.Net.Tests/Index/TestMultiFields.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Index
 
                 IDictionary<BytesRef, IList<int?>> docs = new Dictionary<BytesRef, IList<int?>>();
                 ISet<int?> deleted = new JCG.HashSet<int?>();
-                IList<BytesRef> terms = new List<BytesRef>();
+                IList<BytesRef> terms = new JCG.List<BytesRef>();
 
                 int numDocs = TestUtil.NextInt32(Random, 1, 100 * RandomMultiplier);
                 Documents.Document doc = new Documents.Document();
@@ -84,7 +84,7 @@ namespace Lucene.Net.Index
                         BytesRef term = new BytesRef(s);
                         if (!docs.TryGetValue(term, out IList<int?> docsTerm))
                         {
-                            docs[term] = docsTerm = new List<int?>();
+                            docs[term] = docsTerm = new JCG.List<int?>();
                         }
                         docsTerm.Add(i);
                         terms.Add(term);
@@ -111,7 +111,7 @@ namespace Lucene.Net.Index
 
                 if (Verbose)
                 {
-                    List<BytesRef> termsList = new List<BytesRef>(uniqueTerms);
+                    IList<BytesRef> termsList = new JCG.List<BytesRef>(uniqueTerms);
 #pragma warning disable 612, 618
                     termsList.Sort(BytesRef.UTF8SortedAsUTF16Comparer);
 #pragma warning restore 612, 618
@@ -173,7 +173,7 @@ namespace Lucene.Net.Index
         }
 
         /*
-        private void verify(IndexReader r, String term, List<Integer> expected) throws Exception {
+        private void verify(IndexReader r, String term, IList<Integer> expected) throws Exception {
           DocsEnum docs = TestUtil.Docs(random, r,
                                          "field",
                                          new BytesRef(term),
diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs
index aff3c76..e9d770d 100644
--- a/src/Lucene.Net.Tests/Index/TestPayloads.cs
+++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs
@@ -12,6 +12,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -609,7 +610,7 @@ namespace Lucene.Net.Index
 
             internal ByteArrayPool(int capacity, int size)
             {
-                pool = new List<byte[]>();
+                pool = new JCG.List<byte[]>();
                 for (int i = 0; i < capacity; i++)
                 {
                     pool.Add(new byte[size]);
diff --git a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
index 534ab1c..9f5d8f3 100644
--- a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
+++ b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
@@ -1,11 +1,11 @@
 using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Index.Extensions;
-using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -26,11 +26,10 @@ namespace Lucene.Net.Index
      * limitations under the License.
      */
 
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using IBits = Lucene.Net.Util.IBits;
     using BytesRef = Lucene.Net.Util.BytesRef;
     using Directory = Lucene.Net.Store.Directory;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using IBits = Lucene.Net.Util.IBits;
     using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
     using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
     using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
@@ -260,7 +259,7 @@ namespace Lucene.Net.Index
 
         public static int[] ToArray(DocsEnum docsEnum)
         {
-            List<int> docs = new List<int>();
+            IList<int> docs = new JCG.List<int>();
             while (docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
             {
                 int docID = docsEnum.DocID;
diff --git a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
index a453fe4..d56832f 100644
--- a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
+++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
@@ -8,6 +8,7 @@ using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Index
@@ -265,7 +266,7 @@ namespace Lucene.Net.Index
             {
                 Document doc = new Document();
                 doc.Add(new Int32Field("id", docCount, Field.Store.NO));
-                IList<Token> tokens = new List<Token>();
+                IList<Token> tokens = new JCG.List<Token>();
                 int numTokens = AtLeast(100);
                 //final int numTokens = AtLeast(20);
                 int pos = -1;
@@ -306,7 +307,7 @@ namespace Lucene.Net.Index
                     }
                     if (!postingsByDoc.TryGetValue(docCount, out IList<Token> postings))
                     {
-                        postingsByDoc[docCount] = postings = new List<Token>();
+                        postingsByDoc[docCount] = postings = new JCG.List<Token>();
                     }
                     postings.Add(token);
                     tokens.Add(token);
diff --git a/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs b/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs
index c8427f8..4b246d4 100644
--- a/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs
+++ b/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Util;
+using Lucene.Net.Util;
 using NUnit.Framework;
 using System.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
                 pb[i] = b.Finish();
             }
 
-            List<IEnumerator<Term>> subs = new List<IEnumerator<Term>>();
+            JCG.List<IEnumerator<Term>> subs = new JCG.List<IEnumerator<Term>>();
             for (int i = 0; i < pb.Length; i++)
             {
                 subs.Add(pb[i].GetEnumerator());
diff --git a/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs b/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs
index 934a1b9..c439c42 100644
--- a/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs
+++ b/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs
@@ -1,6 +1,6 @@
-using NUnit.Framework;
+using NUnit.Framework;
 using System;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Index
@@ -97,7 +97,7 @@ namespace Lucene.Net.Index
             Codec codec = Codec.Default;
             SegmentInfo si = new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, -1, false, codec, null);
 
-            SegmentMerger merger = new SegmentMerger(new List<AtomicReader> { reader1, reader2 }, si, (InfoStream)InfoStream.Default, mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), NewIOContext(Random), true);
+            SegmentMerger merger = new SegmentMerger(new JCG.List<AtomicReader> { reader1, reader2 }, si, (InfoStream)InfoStream.Default, mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), NewIOContext(Random), true);
             MergeState mergeState = merger.Merge();
             int docsMerged = mergeState.SegmentInfo.DocCount;
             Assert.IsTrue(docsMerged == 2);
diff --git a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
index 963d533..836ac42 100644
--- a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
@@ -6,6 +6,7 @@ using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -112,7 +113,7 @@ namespace Lucene.Net.Index
         {
             base.SetUp();
 
-            this.snapshots = new List<IndexCommit>();
+            this.snapshots = new JCG.List<IndexCommit>();
         }
 
         [Test]
diff --git a/src/Lucene.Net.Tests/Index/TestStressAdvance.cs b/src/Lucene.Net.Tests/Index/TestStressAdvance.cs
index cfa6852..e4a138a 100644
--- a/src/Lucene.Net.Tests/Index/TestStressAdvance.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressAdvance.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 using NUnit.Framework;
@@ -79,8 +79,8 @@ namespace Lucene.Net.Index
 
                 w.ForceMerge(1);
 
-                IList<int> aDocIDs = new List<int>();
-                IList<int> bDocIDs = new List<int>();
+                IList<int> aDocIDs = new JCG.List<int>();
+                IList<int> bDocIDs = new JCG.List<int>();
 
                 DirectoryReader r = w.GetReader();
                 int[] idToDocID = new int[r.MaxDoc];
diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
index 3155662..19d648a 100644
--- a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
@@ -15,6 +15,7 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -293,7 +294,7 @@ namespace Lucene.Net.Index
             while (iter.MoveNext())
             {
                 Document d = iter.Current;
-                List<IIndexableField> fields = new List<IIndexableField>();
+                IList<IIndexableField> fields = new JCG.List<IIndexableField>();
                 fields.AddRange(d.Fields);
                 // put fields in same order each time
                 fields.Sort(fieldNameComparer);
@@ -919,7 +920,7 @@ namespace Lucene.Net.Index
                 customType1.IsTokenized = false;
                 customType1.OmitNorms = true;
 
-                List<Field> fields = new List<Field>();
+                IList<Field> fields = new JCG.List<Field>();
                 string idString = IdString;
                 Field idField = NewField("id", idString, customType1);
                 fields.Add(idField);
diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
index b4f1ebd..65d7b3e 100644
--- a/src/Lucene.Net.Tests/Index/TestStressNRT.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
@@ -10,6 +10,7 @@ using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
 
 namespace Lucene.Net.Index
@@ -113,7 +114,7 @@ namespace Lucene.Net.Index
 
             AtomicInt32 numCommitting = new AtomicInt32();
 
-            IList<ThreadJob> threads = new List<ThreadJob>();
+            IList<ThreadJob> threads = new JCG.List<ThreadJob>();
 
             Directory dir = NewDirectory();
 
diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
index 80bde48..4cee9cc 100644
--- a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
@@ -70,7 +70,7 @@ namespace Lucene.Net.Index
             IndexReader r = w.GetReader();
             w.Dispose();
 
-            List<BytesRef> terms = new List<BytesRef>();
+            JCG.List<BytesRef> terms = new JCG.List<BytesRef>();
             TermsEnum termsEnum = MultiFields.GetTerms(r, "body").GetEnumerator();
             while (termsEnum.MoveNext())
             {
@@ -247,7 +247,7 @@ namespace Lucene.Net.Index
             //final int numTerms = 50;
 
             ISet<string> terms = new JCG.HashSet<string>();
-            ICollection<string> pendingTerms = new List<string>();
+            ICollection<string> pendingTerms = new JCG.List<string>();
             IDictionary<BytesRef, int?> termToID = new Dictionary<BytesRef, int?>();
             int id = 0;
             while (terms.Count != numTerms)
@@ -738,7 +738,7 @@ namespace Lucene.Net.Index
 
             int END_LOC = -validTerms.Length - 1;
 
-            IList<TermAndState> termStates = new List<TermAndState>();
+            IList<TermAndState> termStates = new JCG.List<TermAndState>();
 
             for (int iter = 0; iter < 100 * RandomMultiplier; iter++)
             {
diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs
index db1bb76..c53a099 100644
--- a/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs
@@ -99,7 +99,7 @@ namespace Lucene.Net.Index
             {
                 string reg = AutomatonTestUtil.RandomRegexp(Random);
                 Automaton automaton = (new RegExp(reg, RegExpSyntax.NONE)).ToAutomaton();
-                IList<BytesRef> matchedTerms = new List<BytesRef>();
+                IList<BytesRef> matchedTerms = new JCG.List<BytesRef>();
                 foreach (BytesRef t in terms)
                 {
                     if (BasicOperations.Run(automaton, t.Utf8ToString()))
@@ -128,7 +128,7 @@ namespace Lucene.Net.Index
                 string reg = AutomatonTestUtil.RandomRegexp(Random);
                 Automaton automaton = (new RegExp(reg, RegExpSyntax.NONE)).ToAutomaton();
                 TermsEnum te = MultiFields.GetTerms(reader, "field").GetEnumerator();
-                IList<BytesRef> unsortedTerms = new List<BytesRef>(terms);
+                IList<BytesRef> unsortedTerms = new JCG.List<BytesRef>(terms);
                 unsortedTerms.Shuffle(Random);
 
                 foreach (BytesRef term in unsortedTerms)
diff --git a/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs b/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs
index 582041c..4797010 100644
--- a/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs
+++ b/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Index
         Directory dir;
         IndexReader reader;
         /* expected uniqueTermCount values for our documents */
-        List<int> expected = new List<int>();
+        IList<int> expected = new JCG.List<int>();
 
         public override void SetUp()
         {
diff --git a/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs
index cd68a27..49459c6 100644
--- a/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs
+++ b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs
@@ -1,7 +1,8 @@
-using System.Collections.Generic;
+using System.Collections.Generic;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using NUnit.Framework;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search.Similarities
@@ -47,7 +48,7 @@ namespace Lucene.Net.Search.Similarities
         public override void SetUp()
         {
             base.SetUp();
-            sims = new List<Similarity>();
+            sims = new JCG.List<Similarity>();
             sims.Add(new DefaultSimilarity());
             sims.Add(new BM25Similarity());
             // TODO: not great that we dup this all with TestSimilarityBase
diff --git a/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs
index ddb31c3..25c386f 100644
--- a/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs
+++ b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs
@@ -1,7 +1,8 @@
-using System;
+using System;
 using System.Collections.Generic;
 using Lucene.Net.Documents;
 using NUnit.Framework;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search.Similarities
@@ -126,7 +127,7 @@ namespace Lucene.Net.Search.Similarities
             searcher = NewSearcher(reader);
             writer.Dispose();
 
-            sims = new List<SimilarityBase>();
+            sims = new JCG.List<SimilarityBase>();
             foreach (BasicModel basicModel in BASIC_MODELS)
             {
                 foreach (AfterEffect afterEffect in AFTER_EFFECTS)
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs b/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs
index 1468d8c..ed831c2 100644
--- a/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs
+++ b/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs
@@ -1,4 +1,4 @@
-using J2N.Text;
+using J2N.Text;
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Documents;
@@ -6,6 +6,7 @@ using Lucene.Net.Index.Extensions;
 using Lucene.Net.Util;
 using NUnit.Framework;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search.Spans
@@ -418,7 +419,7 @@ namespace Lucene.Net.Search.Spans
 #pragma warning disable 612, 618
             BytesRef pay = new BytesRef(("pos: " + 5).GetBytes(IOUtils.CHARSET_UTF_8));
 #pragma warning restore 612, 618
-            SpanQuery query = new SpanPayloadCheckQuery(term1, new List<byte[]>() { pay.Bytes });
+            SpanQuery query = new SpanPayloadCheckQuery(term1, new JCG.List<byte[]>() { pay.Bytes });
             CheckHits(query, new int[] { 1125, 1135, 1145, 1155, 1165, 1175, 1185, 1195, 1225, 1235, 1245, 1255, 1265, 1275, 1285, 1295, 1325, 1335, 1345, 1355, 1365, 1375, 1385, 1395, 1425, 1435, 1445, 1455, 1465, 1475, 1485, 1495, 1525, 1535, 1545, 1555, 1565, 1575, 1585, 1595, 1625, 1635, 1645, 1655, 1665, 1675, 1685, 1695, 1725, 1735, 1745, 1755, 1765, 1775, 1785, 1795, 1825, 1835, 1845, 1855, 1865, 1875, 1885, 1895, 1925, 1935, 1945, 1955, 1965, 1975, 1985, 1995 });
             Assert.IsTrue(searcher.Explain(query, 1125).Value > 0.0f);
 
@@ -435,7 +436,7 @@ namespace Lucene.Net.Search.Spans
             pay = new BytesRef(("pos: " + 0).GetBytes(IOUtils.CHARSET_UTF_8));
             pay2 = new BytesRef(("pos: " + 1).GetBytes(IOUtils.CHARSET_UTF_8));
 #pragma warning restore 612, 618
-            list = new List<byte[]>();
+            list = new JCG.List<byte[]>();
             list.Add(pay.Bytes);
             list.Add(pay2.Bytes);
             query = new SpanNearPayloadCheckQuery(snq, list);
@@ -450,7 +451,7 @@ namespace Lucene.Net.Search.Spans
             pay2 = new BytesRef(("pos: " + 1).GetBytes(IOUtils.CHARSET_UTF_8));
             BytesRef pay3 = new BytesRef(("pos: " + 2).GetBytes(IOUtils.CHARSET_UTF_8));
 #pragma warning restore 612, 618
-            list = new List<byte[]>();
+            list = new JCG.List<byte[]>();
             list.Add(pay.Bytes);
             list.Add(pay2.Bytes);
             list.Add(pay3.Bytes);
@@ -478,7 +479,7 @@ namespace Lucene.Net.Search.Spans
             query = new SpanPositionRangeQuery(oneThousHunThree, 0, 6);
             CheckHits(query, new int[] { 1103, 1203, 1303, 1403, 1503, 1603, 1703, 1803, 1903 });
 
-            var payloads = new List<byte[]>();
+            var payloads = new JCG.List<byte[]>();
 #pragma warning disable 612, 618
             BytesRef pay = new BytesRef(("pos: " + 0).GetBytes(IOUtils.CHARSET_UTF_8));
             BytesRef pay2 = new BytesRef(("pos: " + 1).GetBytes(IOUtils.CHARSET_UTF_8));
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs b/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
index ddd0db0..a511922 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
@@ -4,6 +4,7 @@ using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.Threading.Tasks;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -262,7 +263,7 @@ namespace Lucene.Net.Search
                 {
                     Console.WriteLine("iter=" + iter);
                 }
-                IList<string> terms = new List<string> { "a", "b", "c", "d", "e", "f" };
+                IList<string> terms = new JCG.List<string> { "a", "b", "c", "d", "e", "f" };
                 int numTerms = TestUtil.NextInt32(Random, 1, terms.Count);
                 while (terms.Count > numTerms)
                 {
@@ -285,7 +286,7 @@ namespace Lucene.Net.Search
                 Scorer scorer = weight.GetScorer(s.m_leafContexts[0], null);
 
                 // First pass: just use .NextDoc() to gather all hits
-                IList<ScoreDoc> hits = new List<ScoreDoc>();
+                IList<ScoreDoc> hits = new JCG.List<ScoreDoc>();
                 while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
                 {
                     hits.Add(new ScoreDoc(scorer.DocID, scorer.GetScore()));
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
index 2d365ba..4bbca9b 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
@@ -1,9 +1,10 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
 using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search
@@ -104,7 +105,7 @@ namespace Lucene.Net.Search
 
             BooleanScorer bs = new BooleanScorer(weight, false, 1, scorers, Collections.EmptyList<BulkScorer>(), scorers.Length);
 
-            IList<int> hits = new List<int>();
+            IList<int> hits = new JCG.List<int>();
             bs.Score(new CollectorAnonymousClass(this, hits));
 
             Assert.AreEqual(1, hits.Count, "should have only 1 hit");
diff --git a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
index 8486cb8..c2393f9 100644
--- a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
+++ b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
@@ -11,6 +11,7 @@ using System.Linq;
 using System.Text;
 using System.Threading;
 using System.Threading.Tasks;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -672,7 +673,7 @@ namespace Lucene.Net.Search
             controlledRealTimeReopenThread.IsBackground = (true);
             controlledRealTimeReopenThread.Start();
 
-            IList<ThreadJob> commitThreads = new List<ThreadJob>();
+            IList<ThreadJob> commitThreads = new JCG.List<ThreadJob>();
 
             for (int i = 0; i < 500; i++)
             {
diff --git a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
index ff795c3..db57a7a 100644
--- a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
@@ -1,8 +1,9 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -46,7 +47,7 @@ namespace Lucene.Net.Search
             DocIdSet filteredSet = new FilteredDocIdSetAnonymousClass(this, innerSet);
 
             DocIdSetIterator iter = filteredSet.GetIterator();
-            List<int?> list = new List<int?>();
+            IList<int?> list = new JCG.List<int?>();
             int doc = iter.Advance(3);
             if (doc != DocIdSetIterator.NO_MORE_DOCS)
             {
diff --git a/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs
index 3d701f4..edc4632 100644
--- a/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs
@@ -4,6 +4,7 @@ using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
 
 namespace Lucene.Net.Search
@@ -58,7 +59,7 @@ namespace Lucene.Net.Search
             dir = NewDirectory();
             fieldName = Random.NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
-            List<string> terms = new List<string>();
+            JCG.List<string> terms = new JCG.List<string>();
             int num = AtLeast(200);
             for (int i = 0; i < num; i++)
             {
diff --git a/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs
index 622e9f3..5e939b4 100644
--- a/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs
@@ -5,6 +5,7 @@ using NUnit.Framework;
 using RandomizedTesting.Generators;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -62,7 +63,7 @@ namespace Lucene.Net.Search
             dir = NewDirectory();
             fieldName = Random.NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
-            List<string> terms = new List<string>();
+            JCG.List<string> terms = new JCG.List<string>();
             int num = AtLeast(200);
             for (int i = 0; i < num; i++)
             {
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
index 9aac573..b41590b 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
@@ -1,6 +1,6 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using NUnit.Framework;
-using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search
@@ -61,17 +61,17 @@ namespace Lucene.Net.Search
             ScoreDoc[] results;
             MatchAllDocsQuery q = new MatchAllDocsQuery();
 
-            List<string> terms = new List<string>();
+            JCG.List<string> terms = new JCG.List<string>();
             terms.Add("5");
             results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
             Assert.AreEqual(0, results.Length, "Must match nothing");
 
-            terms = new List<string>();
+            terms = new JCG.List<string>();
             terms.Add("10");
             results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
             Assert.AreEqual(1, results.Length, "Must match 1");
 
-            terms = new List<string>();
+            terms = new JCG.List<string>();
             terms.Add("10");
             terms.Add("20");
             results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
diff --git a/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs b/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
index 989118f..069c8e0 100644
--- a/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
@@ -2,6 +2,7 @@
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search
@@ -90,7 +91,7 @@ namespace Lucene.Net.Search
             query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.DefaultMaxEdits, 0);
             hits = searcher.Search(query, null, 1000).ScoreDocs;
             Assert.AreEqual(3, hits.Length, "3 documents should match");
-            IList<string> order = new List<string> { "bbbbb", "abbbb", "aabbb" };
+            IList<string> order = new JCG.List<string> { "bbbbb", "abbbb", "aabbb" };
             for (int i = 0; i < hits.Length; i++)
             {
                 string term = searcher.Doc(hits[i].Doc).Get("field");
@@ -103,7 +104,7 @@ namespace Lucene.Net.Search
             query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.DefaultMaxEdits, 0, 2, false);
             hits = searcher.Search(query, null, 1000).ScoreDocs;
             Assert.AreEqual(2, hits.Length, "only 2 documents should match");
-            order = new List<string> { "bbbbb", "abbbb" };
+            order = new JCG.List<string> { "bbbbb", "abbbb" };
             for (int i = 0; i < hits.Length; i++)
             {
                 string term = searcher.Doc(hits[i].Doc).Get("field");
diff --git a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
index 6c36474..9ae5da9 100644
--- a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
+++ b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
@@ -6,6 +6,7 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.Threading;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -65,7 +66,7 @@ namespace Lucene.Net.Search
             }
 
             CountdownEvent startingGun = new CountdownEvent(1);
-            IList<ThreadJob> threads = new List<ThreadJob>();
+            IList<ThreadJob> threads = new JCG.List<ThreadJob>();
 
             int iters = AtLeast(1000);
             int idCount = TestUtil.NextInt32(Random, 100, 10000);
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
index 534fa55..f621a36 100644
--- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -251,7 +251,7 @@ namespace Lucene.Net.Search
         [Test]
         public virtual void TestNextAllTerms()
         {
-            IList<string> termsList = new List<string>(commonTerms.Length + mediumTerms.Length + rareTerms.Length);
+            IList<string> termsList = new JCG.List<string>(commonTerms.Length + mediumTerms.Length + rareTerms.Length);
             termsList.AddRange(commonTerms);
             termsList.AddRange(mediumTerms);
             termsList.AddRange(rareTerms);
@@ -270,7 +270,7 @@ namespace Lucene.Net.Search
         [Test]
         public virtual void TestAdvanceAllTerms()
         {
-            IList<string> termsList = new List<string>(commonTerms.Length + mediumTerms.Length + rareTerms.Length);
+            IList<string> termsList = new JCG.List<string>(commonTerms.Length + mediumTerms.Length + rareTerms.Length);
             termsList.AddRange(commonTerms);
             termsList.AddRange(mediumTerms);
             termsList.AddRange(rareTerms);
diff --git a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
index 10cddc8..cbae932 100644
--- a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
@@ -9,6 +9,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using JCG = J2N.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Search
@@ -758,7 +759,7 @@ namespace Lucene.Net.Search
             Analyzer analyzer = new MockAnalyzer(Random);
 
             RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMergePolicy(NewLogMergePolicy()));
-            IList<IList<string>> docs = new List<IList<string>>();
+            IList<IList<string>> docs = new JCG.List<IList<string>>();
             Documents.Document d = new Documents.Document();
             Field f = NewTextField("f", "", Field.Store.NO);
             d.Add(f);
@@ -771,7 +772,7 @@ namespace Lucene.Net.Search
                 // must be > 4096 so it spans multiple chunks
                 int termCount = TestUtil.NextInt32(Random, 4097, 8200);
 
-                IList<string> doc = new List<string>();
+                IList<string> doc = new JCG.List<string>();
 
                 StringBuilder sb = new StringBuilder();
                 while (doc.Count < termCount)
diff --git a/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs b/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs
... 2885 lines suppressed ...

[lucenenet] 01/06: BREAKING: Lucene.Net.Analysis.Stempel.Egothor.Stemmer.MultiTrie: Changed protected m_tries field from List&lt;Trie&gt; to IList&lt;Trie&gt;

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit 5736c97b8f5ae819a90691b7f201c50cc4f34f0d
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Tue Oct 19 05:03:09 2021 +0700

    BREAKING: Lucene.Net.Analysis.Stempel.Egothor.Stemmer.MultiTrie: Changed protected m_tries field from List<Trie> to IList<Trie>
---
 src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
index 80e152e..ab4bef6 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs
@@ -70,7 +70,7 @@ namespace Egothor.Stemmer
         internal static char EOM = '*';
         internal static string EOM_NODE = "" + EOM;
 
-        protected List<Trie> m_tries = new List<Trie>();
+        protected IList<Trie> m_tries = new List<Trie>();
 
         private readonly int BY = 1; // LUCENENET: marked readonly
 

[lucenenet] 05/06: Lucene.Net.Util.ListExtensions: Added optimized path for J2N.Collections.Generic.List&lt;T&gt; in AddRange and Sort methods

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit 2e8fb68a1e903dadd3b9c11d39cc9a00b46f2f1a
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Mon Oct 18 03:16:44 2021 +0700

    Lucene.Net.Util.ListExtensions: Added optimized path for J2N.Collections.Generic.List<T> in AddRange and Sort methods
---
 src/Lucene.Net/Support/Util/ListExtensions.cs | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/src/Lucene.Net/Support/Util/ListExtensions.cs b/src/Lucene.Net/Support/Util/ListExtensions.cs
index 14b275f..786b864 100644
--- a/src/Lucene.Net/Support/Util/ListExtensions.cs
+++ b/src/Lucene.Net/Support/Util/ListExtensions.cs
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using JCG = J2N.Collections.Generic;
 
 namespace Lucene.Net.Util
 {
@@ -42,7 +43,13 @@ namespace Lucene.Net.Util
                 throw new ArgumentNullException(nameof(collection));
 
             if (list is List<T> thisList)
+            {
                 thisList.AddRange(collection);
+            }
+            else if (list is JCG.List<T> jcgList)
+            {
+                jcgList.AddRange(collection);
+            }
             else
             {
                 foreach (var item in collection)
@@ -65,6 +72,10 @@ namespace Lucene.Net.Util
             {
                 listToSort.Sort();
             }
+            else if (list is JCG.List<T> jcgListToSort)
+            {
+                jcgListToSort.Sort();
+            }
             else
             {
                 CollectionUtil.TimSort(list);
@@ -85,6 +96,10 @@ namespace Lucene.Net.Util
             {
                 listToSort.Sort(comparer);
             }
+            else if (list is JCG.List<T> jcgListToSort)
+            {
+                jcgListToSort.Sort(comparer);
+            }
             else
             {
                 CollectionUtil.TimSort(list, comparer);
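
A hedged usage sketch of the new fast path follows. It assumes the extension methods are declared roughly as AddRange<T>(this IList<T> list, IEnumerable<T> collection) and Sort<T>(this IList<T> list) in Lucene.Net.Util, since only the method bodies are visible in the hunks above; ListExtensionsUsage and Demo are invented names.

    // Illustrative only; assumes the Lucene.Net.Util.ListExtensions signatures described above.
    using System.Collections.Generic;
    using Lucene.Net.Util;                 // ListExtensions
    using JCG = J2N.Collections.Generic;

    internal static class ListExtensionsUsage
    {
        public static void Demo()
        {
            IList<int> list = new JCG.List<int>();
            list.AddRange(new[] { 3, 1, 2 });  // takes the new JCG.List<T> branch instead of the per-item Add loop
            list.Sort();                       // delegates to JCG.List<T>.Sort() instead of CollectionUtil.TimSort
        }
    }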

[lucenenet] 06/06: BUG: Lucene.Net.Tests.Suggest.Suggest.Analyzing.TestFreeTextSuggester::TestRandom(): LookupResult calculation in the test was using a different order of parentheses than the production code. This bug existed in Java, but apparently the order makes no difference on that platform. This test was getting a false positive because it was using List&lt;T&gt;.ToString() to make the result comparison, which J2N's List&lt;T&gt; corrects.

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git

commit dd7ed62e9bfc455c9b39ea5d33a783a93280b739
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Mon Oct 18 23:01:21 2021 +0700

    BUG: Lucene.Net.Tests.Suggest.Suggest.Analyzing.TestFreeTextSuggester::TestRandom(): LookupResult calculation in the test was using a different order of parentheses than the production code. This bug existed in Java, but apparently the order makes no difference on that platform. This test was getting a false positive because it was using List<T>.ToString() to make the result comparison, which J2N's List<T> corrects.
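
To make both points concrete, the sketch below uses made-up values for backoff, count and contextCount (they are not taken from the test); only the two expression shapes come from the patch that follows, and LookupResultOrderingDemo is an invented name.

    // Illustrative only: the inputs are invented, the expression shapes mirror the patch below.
    using System;
    using SCG = System.Collections.Generic;
    using JCG = J2N.Collections.Generic;

    internal static class LookupResultOrderingDemo
    {
        public static void Main()
        {
            double backoff = 0.4;
            long count = 7, contextCount = 13;

            // Old test expression: the decimal ratio is computed first, then scaled by long.MaxValue.
            long testScore = (long)(long.MaxValue * ((decimal)backoff * (decimal)count / contextCount));

            // Production-style expression: long.MaxValue is multiplied in before the division.
            long prodScore = (long)(long.MaxValue * (decimal)backoff * ((decimal)count) / contextCount);

            // decimal rounds after every intermediate operation, so the grouping can shift
            // the truncated long by a small amount for some inputs.
            Console.WriteLine(testScore == prodScore);

            // Why the old comparison never noticed the difference:
            // System.Collections.Generic.List<T>.ToString() returns only the type name,
            // while J2N's List<T> formats the list contents.
            Console.WriteLine(new SCG.List<long> { testScore });   // "System.Collections.Generic.List`1[System.Int64]"
            Console.WriteLine(new JCG.List<long> { prodScore });   // prints the element values
        }
    }
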
---
 .../Suggest/Analyzing/TestFreeTextSuggester.cs                      | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
index 7bab0b0..8c5fb98 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
@@ -605,7 +605,11 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                                 // LUCENENET NOTE: We need to calculate this as decimal because when using double it can sometimes 
                                 // return numbers that are greater than long.MaxValue, which results in a negative long number.
                                 // This is also the way it is being done in the FreeTextSuggester to work around the issue.
-                                Lookup.LookupResult lr = new Lookup.LookupResult(ngram, (long)(long.MaxValue * ((decimal)backoff * (decimal)count / contextCount)));
+
+                                // LUCENENET NOTE: The order of parentheses in the Java test didn't match the production code. This apparently doesn't affect the
+                                // result in Java, but does in .NET, so we changed the test to match the production code.
+                                //Lookup.LookupResult lr = new Lookup.LookupResult(ngram, (long)(long.MaxValue * ((decimal)backoff * (decimal)count / contextCount)));
+                                Lookup.LookupResult lr = new Lookup.LookupResult(ngram, (long)(long.MaxValue * (decimal)backoff * ((decimal)count) / contextCount));
                                 tmp.Add(lr);
                                 if (Verbose)
                                 {