Posted to commits@lucenenet.apache.org by mh...@apache.org on 2013/09/24 20:32:54 UTC

[18/50] [abbrv] Massive cleanup, reducing compiler errors

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/MultiDocValues.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/MultiDocValues.cs b/src/core/Index/MultiDocValues.cs
index 54ff6ec..1c732d8 100644
--- a/src/core/Index/MultiDocValues.cs
+++ b/src/core/Index/MultiDocValues.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Index
             }
             else if (size == 1)
             {
-                return leaves[0].Reader.GetNormValues(field);
+                return ((AtomicReader)leaves[0].Reader).GetNormValues(field);
             }
             FieldInfo fi = MultiFields.GetMergedFieldInfos(r).FieldInfo(field);
             if (fi == null || fi.HasNorms == false)
@@ -53,7 +53,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < size; i++)
             {
                 AtomicReaderContext context = leaves[i];
-                NumericDocValues v = context.Reader.GetNormValues(field);
+                NumericDocValues v = ((AtomicReader)context.Reader).GetNormValues(field);
                 if (v == null)
                 {
                     v = NumericDocValues.EMPTY;
@@ -100,7 +100,7 @@ namespace Lucene.Net.Index
             }
             else if (size == 1)
             {
-                return leaves[0].Reader.GetNumericDocValues(field);
+                return ((AtomicReader)leaves[0].Reader).GetNumericDocValues(field);
             }
 
             bool anyReal = false;
@@ -109,7 +109,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < size; i++)
             {
                 AtomicReaderContext context = leaves[i];
-                NumericDocValues v = context.Reader.GetNumericDocValues(field);
+                NumericDocValues v = ((AtomicReader)context.Reader).GetNumericDocValues(field);
                 if (v == null)
                 {
                     v = NumericDocValues.EMPTY;
@@ -162,7 +162,7 @@ namespace Lucene.Net.Index
             }
             else if (size == 1)
             {
-                return leaves[0].Reader.GetBinaryDocValues(field);
+                return ((AtomicReader)leaves[0].Reader).GetBinaryDocValues(field);
             }
 
             bool anyReal = false;
@@ -171,7 +171,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < size; i++)
             {
                 AtomicReaderContext context = leaves[i];
-                BinaryDocValues v = context.Reader.GetBinaryDocValues(field);
+                BinaryDocValues v = ((AtomicReader)context.Reader).GetBinaryDocValues(field);
                 if (v == null)
                 {
                     v = BinaryDocValues.EMPTY;
@@ -206,7 +206,7 @@ namespace Lucene.Net.Index
             }
             else if (size == 1)
             {
-                return leaves[0].Reader.GetSortedDocValues(field);
+                return ((AtomicReader)leaves[0].Reader).GetSortedDocValues(field);
             }
 
             bool anyReal = false;
@@ -215,7 +215,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < size; i++)
             {
                 AtomicReaderContext context = leaves[i];
-                SortedDocValues v = context.Reader.GetSortedDocValues(field);
+                SortedDocValues v = ((AtomicReader)context.Reader).GetSortedDocValues(field);
                 if (v == null)
                 {
                     v = SortedDocValues.EMPTY;
@@ -256,7 +256,7 @@ namespace Lucene.Net.Index
             }
             else if (size == 1)
             {
-                return leaves[0].Reader.GetSortedSetDocValues(field);
+                return ((AtomicReader)leaves[0].Reader).GetSortedSetDocValues(field);
             }
 
             bool anyReal = false;
@@ -265,7 +265,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < size; i++)
             {
                 AtomicReaderContext context = leaves[i];
-                SortedSetDocValues v = context.Reader.GetSortedSetDocValues(field);
+                SortedSetDocValues v = ((AtomicReader)context.Reader).GetSortedSetDocValues(field);
                 if (v == null)
                 {
                     v = SortedSetDocValues.EMPTY;
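
The recurring "(AtomicReader)context.Reader" casts in this file suggest that IndexReaderContext.Reader is typed as the base IndexReader in this port, while the doc-values getters live on AtomicReader. A minimal sketch (not part of the commit) of a hypothetical extension method that could centralize the cast, assuming leaf contexts always wrap per-segment atomic readers:

    using Lucene.Net.Index;

    // Hypothetical helper, not in the commit: leaf contexts always hold
    // per-segment readers, so the downcast is safe for them.
    public static class AtomicReaderContextExtensionsSketch
    {
        public static AtomicReader ToAtomicReader(this AtomicReaderContext context)
        {
            return (AtomicReader)context.Reader;
        }
    }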

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/MultiReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/MultiReader.cs b/src/core/Index/MultiReader.cs
index c6c3778..f1c31cc 100644
--- a/src/core/Index/MultiReader.cs
+++ b/src/core/Index/MultiReader.cs
@@ -64,7 +64,7 @@ namespace Lucene.Net.Index
             }
         }
 
-        protected internal override void DoClose()
+        protected override void DoClose()
         {
             System.IO.IOException ioe = null;
             foreach (IndexReader r in GetSequentialSubReaders())
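
The access-modifier edits in this and the following files come down to one C# rule: an override must repeat the overridden member's declared accessibility exactly, otherwise the compiler rejects it with CS0507. That is why DoClose narrows to protected here while DoOpenIfChanged widens to protected internal in StandardDirectoryReader below; each simply matches its base declaration. A minimal sketch of the rule, assuming a base class with a protected abstract DoClose (class names are stand-ins, not types from the commit):

    abstract class ReaderSketch
    {
        protected abstract void DoClose();
    }

    class MultiReaderSketch : ReaderSketch
    {
        // "protected internal override void DoClose()" would fail with CS0507;
        // the override has to stay exactly "protected" to match the base member.
        protected override void DoClose()
        {
            // release sub-readers here
        }
    }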

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/NormsConsumerPerField.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/NormsConsumerPerField.cs b/src/core/Index/NormsConsumerPerField.cs
index 7371f57..40df7ac 100644
--- a/src/core/Index/NormsConsumerPerField.cs
+++ b/src/core/Index/NormsConsumerPerField.cs
@@ -58,5 +58,9 @@ namespace Lucene.Net.Index
         {
             get { return consumer == null; }
         }
+
+        public override void Abort()
+        {
+        }
     }
 }
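
The empty Abort body is not dead code: if the base per-field consumer declares Abort as abstract, every concrete subclass must override it or the build fails with CS0534, so a no-op override is the smallest fix. A minimal sketch under that assumption (the base type here is a stand-in, not the commit's class):

    abstract class ConsumerPerFieldSketch
    {
        public abstract void Abort();
    }

    class NormsConsumerPerFieldSketch : ConsumerPerFieldSketch
    {
        public override void Abort()
        {
            // nothing is buffered per document here, so aborting is a no-op
        }
    }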

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/ParallelCompositeReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/ParallelCompositeReader.cs b/src/core/Index/ParallelCompositeReader.cs
index 6168ab2..c3452b0 100644
--- a/src/core/Index/ParallelCompositeReader.cs
+++ b/src/core/Index/ParallelCompositeReader.cs
@@ -125,7 +125,7 @@ namespace Lucene.Net.Index
             {
             }
 
-            protected internal override void DoClose()
+            protected override void DoClose()
             {
             }
         }
@@ -160,7 +160,7 @@ namespace Lucene.Net.Index
             }
         }
 
-        protected internal override void DoClose()
+        protected override void DoClose()
         {
             System.IO.IOException ioe = null;
             foreach (IndexReader reader in completeReaderSet)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/SegmentInfos.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/SegmentInfos.cs b/src/core/Index/SegmentInfos.cs
index cc47b2f..569b19e 100644
--- a/src/core/Index/SegmentInfos.cs
+++ b/src/core/Index/SegmentInfos.cs
@@ -286,7 +286,7 @@ namespace Lucene.Net.Index
             }
 
 
-            public override object DoBody(string segmentFileName)
+            protected override object DoBody(string segmentFileName)
             {
                 enclosingInstance.Read(directory, segmentFileName);
                 return null;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/SlowCompositeReaderWrapper.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/SlowCompositeReaderWrapper.cs b/src/core/Index/SlowCompositeReaderWrapper.cs
index 10dc3f6..6542b13 100644
--- a/src/core/Index/SlowCompositeReaderWrapper.cs
+++ b/src/core/Index/SlowCompositeReaderWrapper.cs
@@ -97,7 +97,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < size; i++)
             {
                 AtomicReaderContext context = in_renamed.Leaves[i];
-                SortedDocValues v = context.Reader.GetSortedDocValues(field);
+                SortedDocValues v = ((AtomicReader)context.Reader).GetSortedDocValues(field);
                 if (v == null)
                 {
                     v = SortedDocValues.EMPTY;
@@ -109,6 +109,52 @@ namespace Lucene.Net.Index
             return new MultiSortedDocValues(values, starts, map);
         }
 
+        public override SortedSetDocValues GetSortedSetDocValues(string field)
+        {
+            EnsureOpen();
+            OrdinalMap map = null;
+            lock (cachedOrdMaps)
+            {
+                map = cachedOrdMaps[field];
+                if (map == null)
+                {
+                    // uncached, or not a multi dv
+                    SortedSetDocValues dv = MultiDocValues.GetSortedSetValues(in_renamed, field);
+                    if (dv is MultiDocValues.MultiSortedSetDocValues)
+                    {
+                        map = ((MultiDocValues.MultiSortedSetDocValues)dv).mapping;
+                        if (map.owner == CoreCacheKey)
+                        {
+                            cachedOrdMaps[field] = map;
+                        }
+                    }
+                    return dv;
+                }
+            }
+            // cached ordinal map
+            if (FieldInfos.FieldInfo(field).DocValuesTypeValue != DocValuesType.SORTED_SET)
+            {
+                return null;
+            }
+            //assert map != null;
+            int size = in_renamed.Leaves.Count;
+            SortedSetDocValues[] values = new SortedSetDocValues[size];
+            int[] starts = new int[size + 1];
+            for (int i = 0; i < size; i++)
+            {
+                AtomicReaderContext context = in_renamed.Leaves[i];
+                SortedSetDocValues v = ((AtomicReader)context.Reader).GetSortedSetDocValues(field);
+                if (v == null)
+                {
+                    v = SortedSetDocValues.EMPTY;
+                }
+                values[i] = v;
+                starts[i] = context.docBase;
+            }
+            starts[size] = MaxDoc;
+            return new MultiDocValues.MultiSortedSetDocValues(values, starts, map);
+        }
+
         // TODO: this could really be a weak map somewhere else on the coreCacheKey,
         // but do we really need to optimize slow-wrapper any more?
         private readonly IDictionary<String, OrdinalMap> cachedOrdMaps = new HashMap<String, OrdinalMap>();
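
The new GetSortedSetDocValues follows the same cache-or-compute shape as the sorted variant above it: look the OrdinalMap up under a lock, build it via MultiDocValues on a miss (caching it only when it is owned by this reader's core cache key), and otherwise reassemble the per-leaf values around the cached map. The null check after the indexer relies on the port's HashMap returning null for missing keys; a standard Dictionary throws instead, as in this rough sketch of the same lookup pattern (stand-in types, not the commit's code):

    using System.Collections.Generic;

    class OrdMapCacheSketch
    {
        private readonly Dictionary<string, object> cachedOrdMaps = new Dictionary<string, object>();

        public object GetOrBuild(string field)
        {
            object map;
            lock (cachedOrdMaps)
            {
                if (!cachedOrdMaps.TryGetValue(field, out map))
                {
                    map = BuildMap(field);        // stands in for MultiDocValues.GetSortedSetValues
                    cachedOrdMaps[field] = map;   // the real code caches only when the map is owned
                    return map;
                }
            }
            return map;                           // cached hit: reuse the ordinal map
        }

        private static object BuildMap(string field)
        {
            return new object();
        }
    }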

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/StandardDirectoryReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/StandardDirectoryReader.cs b/src/core/Index/StandardDirectoryReader.cs
index fb3e0c8..dd9c006 100644
--- a/src/core/Index/StandardDirectoryReader.cs
+++ b/src/core/Index/StandardDirectoryReader.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Index
             {
             }
 
-            public override object DoBody(string segmentFileName)
+            protected override object DoBody(string segmentFileName)
             {
                 SegmentInfos sis = new SegmentInfos();
                 sis.Read(directory, segmentFileName);
@@ -271,12 +271,12 @@ namespace Lucene.Net.Index
             return buffer.ToString();
         }
 
-        protected override DirectoryReader DoOpenIfChanged()
+        protected internal override DirectoryReader DoOpenIfChanged()
         {
             return DoOpenIfChanged((IndexCommit)null);
         }
 
-        protected override DirectoryReader DoOpenIfChanged(IndexCommit commit)
+        protected internal override DirectoryReader DoOpenIfChanged(IndexCommit commit)
         {
             EnsureOpen();
 
@@ -292,7 +292,7 @@ namespace Lucene.Net.Index
             }
         }
 
-        protected override DirectoryReader DoOpenIfChanged(IndexWriter writer, bool applyAllDeletes)
+        protected internal override DirectoryReader DoOpenIfChanged(IndexWriter writer, bool applyAllDeletes)
         {
             EnsureOpen();
             if (writer == this.writer && applyAllDeletes == this.applyAllDeletes)
@@ -363,7 +363,7 @@ namespace Lucene.Net.Index
                 this.parent = parent;
             }
 
-            public override object DoBody(string segmentFileName)
+            protected override object DoBody(string segmentFileName)
             {
                 SegmentInfos infos = new SegmentInfos();
                 infos.Read(directory, segmentFileName);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/TermVectorsConsumerPerField.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/TermVectorsConsumerPerField.cs b/src/core/Index/TermVectorsConsumerPerField.cs
index a6aaec3..adda8ec 100644
--- a/src/core/Index/TermVectorsConsumerPerField.cs
+++ b/src/core/Index/TermVectorsConsumerPerField.cs
@@ -51,18 +51,18 @@ namespace Lucene.Net.Index
             for (int i = 0; i < count; i++)
             {
                 IIndexableField field = fields[i];
-                if (field.FieldType.Indexed)
+                if (field.FieldTypeValue.Indexed)
                 {
-                    if (field.FieldType.StoreTermVectors)
+                    if (field.FieldTypeValue.StoreTermVectors)
                     {
                         doVectors = true;
-                        doVectorPositions |= field.FieldType.StoreTermVectorPositions;
-                        doVectorOffsets |= field.FieldType.StoreTermVectorOffsets;
+                        doVectorPositions |= field.FieldTypeValue.StoreTermVectorPositions;
+                        doVectorOffsets |= field.FieldTypeValue.StoreTermVectorOffsets;
                         if (doVectorPositions)
                         {
-                            doVectorPayloads |= field.FieldType.StoreTermVectorPayloads;
+                            doVectorPayloads |= field.FieldTypeValue.StoreTermVectorPayloads;
                         }
-                        else if (field.FieldType.StoreTermVectorPayloads)
+                        else if (field.FieldTypeValue.StoreTermVectorPayloads)
                         {
                             // TODO: move this check somewhere else, and impl the other missing ones
                             throw new ArgumentException("cannot index term vector payloads for field: " + field + " without term vector positions");
@@ -125,7 +125,7 @@ namespace Lucene.Net.Index
 
         public void Abort() { }
 
-        internal void Finish()
+        public override void Finish()
         {
             if (!doVectors || termsHashPerField.bytesHash.Size == 0)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/TermsEnum.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/TermsEnum.cs b/src/core/Index/TermsEnum.cs
index 49b003b..47fa599 100644
--- a/src/core/Index/TermsEnum.cs
+++ b/src/core/Index/TermsEnum.cs
@@ -107,7 +107,7 @@ namespace Lucene.Net.Index
                 get { throw new InvalidOperationException("this property should never be called."); }
             }
 
-            public IComparer<BytesRef> Comparator
+            public override IComparer<BytesRef> Comparator
             {
                 get { return null; }
             }
@@ -137,7 +137,7 @@ namespace Lucene.Net.Index
                 throw new InvalidOperationException("this method should never be called.");
             }
 
-            public BytesRef Next()
+            public override BytesRef Next()
             {
                 return null;
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/TermsHash.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/TermsHash.cs b/src/core/Index/TermsHash.cs
index 5799ed4..e6e2d00 100644
--- a/src/core/Index/TermsHash.cs
+++ b/src/core/Index/TermsHash.cs
@@ -101,7 +101,7 @@ namespace Lucene.Net.Index
             bytePool.Reset(false, false);
         }
 
-        internal override void Flush(IDictionary<string, InvertedDocConsumerPerField> fieldsToFlush, SegmentWriteState state)
+        public override void Flush(IDictionary<string, InvertedDocConsumerPerField> fieldsToFlush, SegmentWriteState state)
         {
             IDictionary<String, TermsHashConsumerPerField> childFields = new HashMap<String, TermsHashConsumerPerField>();
             IDictionary<String, InvertedDocConsumerPerField> nextChildFields;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Index/TermsHashPerField.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/TermsHashPerField.cs b/src/core/Index/TermsHashPerField.cs
index a9428c8..b4c3ebc 100644
--- a/src/core/Index/TermsHashPerField.cs
+++ b/src/core/Index/TermsHashPerField.cs
@@ -118,7 +118,7 @@ namespace Lucene.Net.Index
         private bool doCall;
         private bool doNextCall;
 
-        internal override void Start(IIndexableField f)
+        public override void Start(IIndexableField f)
         {
             termAtt = fieldState.attributeSource.AddAttribute<ITermToBytesRefAttribute>();
             termBytesRef = termAtt.BytesRef;
@@ -129,7 +129,7 @@ namespace Lucene.Net.Index
             }
         }
 
-        internal override bool Start(IIndexableField[] fields, int count)
+        public override bool Start(IIndexableField[] fields, int count)
         {
             doCall = consumer.Start(fields, count);
             bytesHash.Reinit();
@@ -186,7 +186,7 @@ namespace Lucene.Net.Index
         }
 
         // Primary entry point (for first TermsHash)
-        internal override void Add()
+        public override void Add()
         {
             // We are first in the chain so we must "intern" the
             // term text into textStart address
@@ -302,7 +302,7 @@ namespace Lucene.Net.Index
             WriteByte(stream, (byte)i);
         }
 
-        internal override void Finish()
+        public override void Finish()
         {
             consumer.Finish();
             if (nextPerField != null)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Lucene.Net.csproj
----------------------------------------------------------------------
diff --git a/src/core/Lucene.Net.csproj b/src/core/Lucene.Net.csproj
index 306396c..de68b1d 100644
--- a/src/core/Lucene.Net.csproj
+++ b/src/core/Lucene.Net.csproj
@@ -193,6 +193,7 @@
     <Compile Include="Codecs\Compressing\CompressingStoredFieldsWriter.cs" />
     <Compile Include="Codecs\Compressing\CompressingTermVectorsFormat.cs" />
     <Compile Include="Codecs\Compressing\CompressingTermVectorsReader.cs" />
+    <Compile Include="Codecs\Compressing\CompressingTermVectorsWriter.cs" />
     <Compile Include="Codecs\Compressing\CompressionMode.cs" />
     <Compile Include="Codecs\Compressing\Compressor.cs" />
     <Compile Include="Codecs\Compressing\Decompressor.cs" />

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/AutomatonQuery.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/AutomatonQuery.cs b/src/core/Search/AutomatonQuery.cs
index 91da42f..6a5440f 100644
--- a/src/core/Search/AutomatonQuery.cs
+++ b/src/core/Search/AutomatonQuery.cs
@@ -23,7 +23,7 @@ namespace Lucene.Net.Search
             this.compiled = new CompiledAutomaton(automaton);
         }
 
-        protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
+        protected internal override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
         {
             return compiled.GetTermsEnum(terms);
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/BitsFilteredDocIdSet.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/BitsFilteredDocIdSet.cs b/src/core/Search/BitsFilteredDocIdSet.cs
index d3f607f..6db4a39 100644
--- a/src/core/Search/BitsFilteredDocIdSet.cs
+++ b/src/core/Search/BitsFilteredDocIdSet.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Search
 			this.acceptDocs = acceptDocs;
 		}
         
-		protected override bool Match(int docid)
+		public override bool Match(int docid)
 		{
 			return acceptDocs[docid];
 		}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/ConstantScoreAutoRewrite.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/ConstantScoreAutoRewrite.cs b/src/core/Search/ConstantScoreAutoRewrite.cs
index 28478e9..f12f920 100644
--- a/src/core/Search/ConstantScoreAutoRewrite.cs
+++ b/src/core/Search/ConstantScoreAutoRewrite.cs
@@ -59,11 +59,11 @@ namespace Lucene.Net.Search
             }
             else if (size == 0)
             {
-                return GetTopLevelQuery();
+                return TopLevelQuery;
             }
             else
             {
-                BooleanQuery bq = GetTopLevelQuery();
+                BooleanQuery bq = TopLevelQuery;
                 BytesRefHash pendingTerms = col.pendingTerms;
                 int[] sort = pendingTerms.Sort(col.termsEnum.Comparator);
                 for (int i = 0; i < size; i++)
@@ -86,6 +86,9 @@ namespace Lucene.Net.Search
             {
                 this.docCountCutoff = docCountCutoff;
                 this.termCountLimit = termCountLimit;
+
+                // .NET Port: moved from inline here
+                this.pendingTerms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array);
             }
 
             public override void SetNextEnum(TermsEnum termsEnum)
@@ -108,7 +111,7 @@ namespace Lucene.Net.Search
                 if (pos < 0)
                 {
                     pos = (-pos) - 1;
-                    array.termState[pos].register(termState, readerContext.ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
+                    array.termState[pos].Register(termState, readerContext.ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
                 }
                 else
                 {
@@ -123,7 +126,7 @@ namespace Lucene.Net.Search
 
             internal int docCountCutoff, termCountLimit;
             internal TermStateByteStart array = new TermStateByteStart(16);
-            internal BytesRefHash pendingTerms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array);
+            internal BytesRefHash pendingTerms; // .NET port: initialization moved to ctor
         }
 
         public override int GetHashCode()
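
The ".NET Port" comments mark a real language difference: unlike Java, a C# field initializer may not reference another instance member, so constructing pendingTerms inline from the array field fails with CS0236 and the initialization has to move into the constructor. A minimal sketch of the rule (stand-in names, not the commit's types):

    class CutOffTermCollectorSketch
    {
        internal int[] array = new int[16];

        // internal int firstSlot = array[0];   // CS0236: a field initializer cannot
        //                                      // reference the non-static field 'array'

        internal int firstSlot;

        public CutOffTermCollectorSketch()
        {
            firstSlot = array[0];               // constructors may use instance fields
        }
    }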

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/DocTermOrdsRangeFilter.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/DocTermOrdsRangeFilter.cs b/src/core/Search/DocTermOrdsRangeFilter.cs
index 1963348..1f2a2ea 100644
--- a/src/core/Search/DocTermOrdsRangeFilter.cs
+++ b/src/core/Search/DocTermOrdsRangeFilter.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Search
             this.includeUpper = includeUpper;
         }
 
-        public abstract DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs);
+        public abstract override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs);
 
         public static DocTermOrdsRangeFilter NewBytesRefRange(string field, BytesRef lowerVal, BytesRef upperVal, bool includeLower, bool includeUpper)
         {
@@ -41,7 +41,7 @@ namespace Lucene.Net.Search
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                SortedSetDocValues docTermOrds = FieldCache.DEFAULT.GetDocTermOrds(context.Reader, field);
+                SortedSetDocValues docTermOrds = FieldCache.DEFAULT.GetDocTermOrds((AtomicReader)context.Reader, field);
                 long lowerPoint = lowerVal == null ? -1 : docTermOrds.LookupTerm(lowerVal);
                 long upperPoint = upperVal == null ? -1 : docTermOrds.LookupTerm(upperVal);
 
@@ -92,7 +92,7 @@ namespace Lucene.Net.Search
 
                 //assert inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0;
 
-                return new AnonymousFieldCacheDocIdSet(context.Reader.MaxDoc, acceptDocs);
+                return new AnonymousFieldCacheDocIdSet(context.Reader.MaxDoc, acceptDocs, docTermOrds, inclusiveLowerPoint, inclusiveUpperPoint);
             }
 
             private sealed class AnonymousFieldCacheDocIdSet : FieldCacheDocIdSet
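
Two fixes sit in this file. The GetDocIdSet redeclaration gains "abstract override" because a plain abstract redeclaration would introduce a second member that hides the one inherited from Filter rather than restating it. The anonymous DocIdSet now receives docTermOrds and the inclusive bounds through its constructor, since a named C# nested class, unlike a Java anonymous class, cannot capture locals of the enclosing method. A minimal sketch of the first rule (stand-in names):

    abstract class FilterSketch
    {
        public abstract int GetDocIdSetSketch(int maxDoc);
    }

    abstract class RangeFilterSketch : FilterSketch
    {
        // Restates the inherited abstract member (e.g. to attach new docs)
        // without hiding it; plain "abstract" here would hide the base member.
        public abstract override int GetDocIdSetSketch(int maxDoc);
    }

    class ConcreteRangeFilterSketch : RangeFilterSketch
    {
        public override int GetDocIdSetSketch(int maxDoc)
        {
            return maxDoc;   // trivial body just so the sketch compiles
        }
    }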

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/FieldCacheRangeFilter.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/FieldCacheRangeFilter.cs b/src/core/Search/FieldCacheRangeFilter.cs
index bcb82b3..21b842a 100644
--- a/src/core/Search/FieldCacheRangeFilter.cs
+++ b/src/core/Search/FieldCacheRangeFilter.cs
@@ -73,7 +73,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     int docOrd = fcsi.GetOrd(doc);
                     return docOrd >= inclusiveLowerPoint && docOrd <= inclusiveUpperPoint;
@@ -87,7 +87,7 @@ namespace Lucene.Net.Search
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                SortedDocValues fcsi = FieldCache.DEFAULT.GetTermsIndex(context.Reader, field);
+                SortedDocValues fcsi = FieldCache.DEFAULT.GetTermsIndex((AtomicReader)context.Reader, field);
                 int lowerPoint = lowerVal == null ? -1 : fcsi.LookupTerm(new BytesRef(lowerVal));
                 int upperPoint = upperVal == null ? -1 : fcsi.LookupTerm(new BytesRef(upperVal));
 
@@ -161,7 +161,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     int docOrd = fcsi.GetOrd(doc);
                     return docOrd >= inclusiveLowerPoint && docOrd <= inclusiveUpperPoint;
@@ -175,7 +175,7 @@ namespace Lucene.Net.Search
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                SortedDocValues fcsi = FieldCache.DEFAULT.GetTermsIndex(context.Reader, field);
+                SortedDocValues fcsi = FieldCache.DEFAULT.GetTermsIndex((AtomicReader)context.Reader, field);
                 int lowerPoint = lowerVal == null ? -1 : fcsi.LookupTerm(lowerVal);
                 int upperPoint = upperVal == null ? -1 : fcsi.LookupTerm(upperVal);
 
@@ -246,7 +246,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     sbyte value = values.Get(doc);
                     return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
@@ -287,7 +287,7 @@ namespace Lucene.Net.Search
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return DocIdSet.EMPTY_DOCIDSET;
 
-                FieldCache.Bytes values = FieldCache.DEFAULT.GetBytes(context.Reader, field, (FieldCache.IByteParser)parser, false);
+                FieldCache.Bytes values = FieldCache.DEFAULT.GetBytes((AtomicReader)context.Reader, field, (FieldCache.IByteParser)parser, false);
 
                 // we only request the usage of termDocs, if the range contains 0
                 return new AnonymousClassFieldCacheDocIdSet(values, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs);
@@ -311,7 +311,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     short value = values.Get(doc);
                     return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
@@ -353,7 +353,7 @@ namespace Lucene.Net.Search
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return DocIdSet.EMPTY_DOCIDSET;
 
-                FieldCache.Shorts values = FieldCache.DEFAULT.GetShorts(context.Reader, field, (FieldCache.IShortParser)parser, false);
+                FieldCache.Shorts values = FieldCache.DEFAULT.GetShorts((AtomicReader)context.Reader, field, (FieldCache.IShortParser)parser, false);
 
                 // we only request the usage of termDocs, if the range contains 0
                 return new AnonymousClassFieldCacheDocIdSet(values, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs);
@@ -378,7 +378,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     int value = values.Get(doc);
                     return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
@@ -420,7 +420,7 @@ namespace Lucene.Net.Search
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return DocIdSet.EMPTY_DOCIDSET;
 
-                FieldCache.Ints values = FieldCache.DEFAULT.GetInts(context.Reader, field, (FieldCache.IIntParser)parser, false);
+                FieldCache.Ints values = FieldCache.DEFAULT.GetInts((AtomicReader)context.Reader, field, (FieldCache.IIntParser)parser, false);
                 // we only request the usage of termDocs, if the range contains 0
                 return new AnonymousClassFieldCacheDocIdSet(values, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs);
             }
@@ -444,7 +444,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     long value = values.Get(doc);
                     return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
@@ -486,7 +486,7 @@ namespace Lucene.Net.Search
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return DocIdSet.EMPTY_DOCIDSET;
 
-                FieldCache.Longs values = FieldCache.DEFAULT.GetLongs(context.Reader, field, (FieldCache.ILongParser)parser, false);
+                FieldCache.Longs values = FieldCache.DEFAULT.GetLongs((AtomicReader)context.Reader, field, (FieldCache.ILongParser)parser, false);
                 // we only request the usage of termDocs, if the range contains 0
                 return new AnonymousClassFieldCacheDocIdSet(values, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs);
             }
@@ -510,7 +510,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     float value = values.Get(doc);
                     return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
@@ -556,7 +556,7 @@ namespace Lucene.Net.Search
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return DocIdSet.EMPTY_DOCIDSET;
 
-                FieldCache.Floats values = FieldCache.DEFAULT.GetFloats(context.Reader, field, (FieldCache.IFloatParser)parser, false);
+                FieldCache.Floats values = FieldCache.DEFAULT.GetFloats((AtomicReader)context.Reader, field, (FieldCache.IFloatParser)parser, false);
 
                 // we only request the usage of termDocs, if the range contains 0
                 return new AnonymousClassFieldCacheDocIdSet(values, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs);
@@ -581,7 +581,7 @@ namespace Lucene.Net.Search
                     this.inclusiveUpperPoint = inclusiveUpperPoint;
                 }
 
-                internal override bool MatchDoc(int doc)
+                protected override bool MatchDoc(int doc)
                 {
                     double value = values.Get(doc);
                     return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
@@ -627,7 +627,7 @@ namespace Lucene.Net.Search
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return DocIdSet.EMPTY_DOCIDSET;
 
-                FieldCache.Doubles values = FieldCache.DEFAULT.GetDoubles(context.Reader, field, (FieldCache.IDoubleParser)parser, false);
+                FieldCache.Doubles values = FieldCache.DEFAULT.GetDoubles((AtomicReader)context.Reader, field, (FieldCache.IDoubleParser)parser, false);
 
                 // we only request the usage of termDocs, if the range contains 0
                 return new AnonymousClassFieldCacheDocIdSet(values, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs);
@@ -779,7 +779,7 @@ namespace Lucene.Net.Search
         }
 
         /// <summary>This method is implemented for each data type </summary>
-        public abstract DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs);
+        public abstract override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs);
 
         public override string ToString()
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/FieldComparator.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/FieldComparator.cs b/src/core/Search/FieldComparator.cs
index 7bdf4be..59ab91e 100644
--- a/src/core/Search/FieldComparator.cs
+++ b/src/core/Search/FieldComparator.cs
@@ -98,11 +98,11 @@ namespace Lucene.Net.Search
         /// </param>
         /// <returns> value in this slot upgraded to Comparable
         /// </returns>
-        public abstract override T Value(int slot);
+        public abstract override object Value(int slot);
 
         public T this[int slot]
         {
-            get { return Value(slot); }
+            get { return (T)Value(slot); }
         }
 
         public virtual int CompareValues(T first, T second)
@@ -298,7 +298,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override sbyte Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -378,7 +378,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override double Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -459,7 +459,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override float Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -539,7 +539,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override short Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -651,7 +651,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override int Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -770,7 +770,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override long Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -861,7 +861,7 @@ namespace Lucene.Net.Search
                 }
             }
 
-            public override float Value(int slot)
+            public override object Value(int slot)
             {
                 return scores[slot];
             }
@@ -925,7 +925,7 @@ namespace Lucene.Net.Search
                 this.bottom = docIDs[bottom];
             }
 
-            public override int Value(int slot)
+            public override object Value(int slot)
             {
                 return docIDs[slot];
             }
@@ -1048,7 +1048,7 @@ namespace Lucene.Net.Search
                     parent.SetBottom(slot);
                 }
 
-                public override BytesRef Value(int slot)
+                public override object Value(int slot)
                 {
                     return parent.Value(slot);
                 }
@@ -1185,7 +1185,7 @@ namespace Lucene.Net.Search
                 }
             }
 
-            public override BytesRef Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
@@ -1263,7 +1263,7 @@ namespace Lucene.Net.Search
                 this.bottom = values[bottom];
             }
 
-            public override BytesRef Value(int slot)
+            public override object Value(int slot)
             {
                 return values[slot];
             }
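
All of the Value overrides in this file change their return type to object for the same reason: C# of this era has no covariant return types, so an override cannot narrow FieldComparator's object-returning Value to sbyte, double, BytesRef and so on; the strongly typed view is recovered by the cast in the generic indexer instead. A minimal sketch of that arrangement (stand-in names, not the commit's classes):

    abstract class ComparatorSketch
    {
        public abstract object Value(int slot);
    }

    class ComparatorSketch<T> : ComparatorSketch
    {
        private readonly T[] values = new T[32];

        public override object Value(int slot)
        {
            return values[slot];            // value types come back boxed
        }

        public T this[int slot]
        {
            get { return (T)Value(slot); }  // cast restores the concrete type
        }
    }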

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/FieldValueHitQueue.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/FieldValueHitQueue.cs b/src/core/Search/FieldValueHitQueue.cs
index 1fea37e..fc162ae 100644
--- a/src/core/Search/FieldValueHitQueue.cs
+++ b/src/core/Search/FieldValueHitQueue.cs
@@ -70,7 +70,7 @@ namespace Lucene.Net.Search
             /// <param name="hitA">ScoreDoc</param>
             /// <param name="hitB">ScoreDoc</param>
             /// <returns><c>true</c> if document <c>a</c> should be sorted after document <c>b</c>.</returns>
-            public override bool LessThan(Entry hitA, Entry hitB)
+            public override bool LessThan(T hitA, T hitB)
             {
                 Debug.Assert(hitA != hitB);
                 Debug.Assert(hitA.slot != hitB.slot);
@@ -84,6 +84,11 @@ namespace Lucene.Net.Search
                 // avoid random sort order that could lead to duplicates (bug #31241):
                 return hitA.Doc > hitB.Doc;
             }
+
+            public override bool LessThan(Entry a, Entry b)
+            {
+                return LessThan(a, b);
+            }
         }
 
         /// <summary> An implementation of <see cref="FieldValueHitQueue" /> which is optimized in case
@@ -106,7 +111,7 @@ namespace Lucene.Net.Search
                 }
             }
 
-            public override bool LessThan(Entry hitA, Entry hitB)
+            public override bool LessThan(T hitA, T hitB)
             {
                 Debug.Assert(hitA != hitB);
                 Debug.Assert(hitA.slot != hitB.slot);
@@ -125,6 +130,11 @@ namespace Lucene.Net.Search
                 // avoid random sort order that could lead to duplicates (bug #31241):
                 return hitA.Doc > hitB.Doc;
             }
+
+            public override bool LessThan(Entry a, Entry b)
+            {
+                return LessThan(a, b);
+            }
         }
 
 
@@ -211,7 +221,7 @@ namespace Lucene.Net.Search
         protected internal FieldComparator firstComparator;
         protected internal int[] reverseMul;
 
-        public abstract override bool LessThan(FieldValueHitQueue.Entry a, FieldValueHitQueue.Entry b);
+        public abstract bool LessThan(FieldValueHitQueue.Entry a, FieldValueHitQueue.Entry b);
 
         /// <summary> Given a queue Entry, creates a corresponding FieldDoc
         /// that contains the values used to sort the given document.
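
The new LessThan(Entry, Entry) overrides bridge the non-generic abstract declaration at the bottom of the file to the generic comparison above them. For that forwarding to reach the generic overload, the arguments need a downcast to T; with plain Entry arguments, overload resolution binds back to the Entry overload itself. A minimal sketch of the bridge, assuming the element type T derives from Entry (stand-in names, not the commit's classes):

    class EntrySketch
    {
        public int Slot;
        public int Doc;
    }

    abstract class HitQueueSketch<T> where T : EntrySketch
    {
        public abstract bool LessThan(T hitA, T hitB);

        // Non-generic entry point forwarding to the generic comparison.
        public bool LessThan(EntrySketch a, EntrySketch b)
        {
            return LessThan((T)a, (T)b);   // the cast selects the generic overload
        }
    }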

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/NumericRangeQuery.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/NumericRangeQuery.cs b/src/core/Search/NumericRangeQuery.cs
index a5269bb..1b4e90c 100644
--- a/src/core/Search/NumericRangeQuery.cs
+++ b/src/core/Search/NumericRangeQuery.cs
@@ -171,7 +171,7 @@ namespace Lucene.Net.Search
             this.maxInclusive = maxInclusive;
         }
 
-        protected override TermsEnum GetTermsEnum(Terms terms, Util.AttributeSource atts)
+        protected internal override TermsEnum GetTermsEnum(Terms terms, Util.AttributeSource atts)
         {
             if (min.HasValue && max.HasValue && (min.Value).CompareTo(max.Value) > 0)
             {
@@ -181,7 +181,7 @@ namespace Lucene.Net.Search
         }
 
 	    /// <summary>Returns the field name for this query </summary>
-	    public string Field
+	    public override string Field
 	    {
 	        get { return field; }
 	    }
@@ -258,7 +258,7 @@ namespace Lucene.Net.Search
         [System.Runtime.Serialization.OnDeserialized]
         internal void OnDeserialized(System.Runtime.Serialization.StreamingContext context)
         {
-            field = StringHelper.Intern(field);
+            field = string.Intern(field);
         }
 		
 		// members (package private, to be also fast accessible by NumericRangeTermsEnum)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Payloads/PayloadNearQuery.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Payloads/PayloadNearQuery.cs b/src/core/Search/Payloads/PayloadNearQuery.cs
index 08b201c..ddef9cb 100644
--- a/src/core/Search/Payloads/PayloadNearQuery.cs
+++ b/src/core/Search/Payloads/PayloadNearQuery.cs
@@ -8,232 +8,237 @@ using Lucene.Net.Search.Similarities;
 using Lucene.Net.Search.Spans;
 using Lucene.Net.Util;
 
-public class PayloadNearQuery : SpanNearQuery
+namespace Lucene.Net.Search.Payloads
 {
-    protected string fieldName;
-    protected PayloadFunction function;
-
-    public PayloadNearQuery(SpanQuery[] clauses, int slop, bool inOrder)
-        : this(clauses, slop, inOrder, new AveragePayloadFunction())
+    public class PayloadNearQuery : SpanNearQuery
     {
-    }
-
-    public PayloadNearQuery(SpanQuery[] clauses, int slop, bool inOrder,
-                            PayloadFunction function)
-        : base(clauses, slop, inOrder)
-    {
-        fieldName = clauses[0].Field; // all clauses must have same field
-        this.function = function;
-    }
+        protected string fieldName;
+        protected PayloadFunction function;
 
-    public override Weight CreateWeight(IndexSearcher searcher)
-    {
-        return new PayloadNearSpanWeight(this, searcher);
-    }
+        public PayloadNearQuery(SpanQuery[] clauses, int slop, bool inOrder)
+            : this(clauses, slop, inOrder, new AveragePayloadFunction())
+        {
+        }
 
-    public override PayloadNearQuery Clone()
-    {
-        int sz = clauses.Count;
-        var newClauses = new SpanQuery[sz];
+        public PayloadNearQuery(SpanQuery[] clauses, int slop, bool inOrder,
+                                PayloadFunction function)
+            : base(clauses, slop, inOrder)
+        {
+            fieldName = clauses[0].Field; // all clauses must have same field
+            this.function = function;
+        }
 
-        for (int i = 0; i < sz; i++)
+        public override Weight CreateWeight(IndexSearcher searcher)
         {
-            newClauses[i] = (SpanQuery) clauses.[i].clone();
+            return new PayloadNearSpanWeight(this, searcher);
         }
-        var boostingNearQuery = new PayloadNearQuery(newClauses, Slop,
-                                                     inOrder, function);
-        boostingNearQuery.Boost = Boost)
-        ;
-        return boostingNearQuery;
-    }
 
-    public override string ToString(string field)
-    {
-        var buffer = new StringBuilder();
-        buffer.Append("payloadNear([");
+        public override object Clone()
+        {
+            int sz = clauses.Count;
+            var newClauses = new SpanQuery[sz];
+
+            for (int i = 0; i < sz; i++)
+            {
+                newClauses[i] = (SpanQuery)clauses[i].Clone();
+            }
+            var boostingNearQuery = new PayloadNearQuery(newClauses, Slop,
+                                                         inOrder, function);
+            boostingNearQuery.Boost = Boost;
+            return boostingNearQuery;
+        }
 
-        IEnumerator<SpanQuery> i = clauses.GetEnumerator();
-        while (i.MoveNext())
+        public override string ToString(string field)
         {
-            SpanQuery clause = i.Current;
-            buffer.Append(clause.ToString(field));
-            if (i.hasNext())
+            var buffer = new StringBuilder();
+            buffer.Append("payloadNear([");
+
+            IEnumerator<SpanQuery> i = clauses.GetEnumerator();
+            bool hasCommaSpace = false;
+            while (i.MoveNext())
             {
+                SpanQuery clause = i.Current;
+                buffer.Append(clause.ToString(field));
                 buffer.Append(", ");
+                hasCommaSpace = true;
             }
-        }
-        buffer.Append("], ");
-        buffer.Append(Slop);
-        buffer.Append(", ");
-        buffer.Append(inOrder);
-        buffer.Append(")");
-        buffer.Append(ToStringUtils.Boost(Boost));
-        return buffer.ToString();
-    }
 
-    public override int GetHashCode()
-    {
-        int prime = 31;
-        int result = base.GetHashCode();
-        result = prime * result + ((fieldName == null) ? 0 : fieldName.GetHashCode());
-        result = prime * result + ((function == null) ? 0 : function.GetHashCode());
-        return result;
-    }
+            if (hasCommaSpace)
+                buffer.Remove(buffer.Length - 2, 2);
 
-    public override bool Equals(Object obj)
-    {
-        if (this == obj)
-            return true;
-        if (!base.Equals(obj))
-            return false;
-        if (GetType() != obj.GetType())
-            return false;
-        var other = (PayloadNearQuery)obj;
-        if (fieldName == null)
-        {
-            if (other.fieldName != null)
-                return false;
+            buffer.Append("], ");
+            buffer.Append(Slop);
+            buffer.Append(", ");
+            buffer.Append(inOrder);
+            buffer.Append(")");
+            buffer.Append(ToStringUtils.Boost(Boost));
+            return buffer.ToString();
         }
-        else if (!fieldName.Equals(other.fieldName))
-            return false;
-        if (function == null)
+
+        public override int GetHashCode()
         {
-            if (other.function != null)
-                return false;
+            int prime = 31;
+            int result = base.GetHashCode();
+            result = prime * result + ((fieldName == null) ? 0 : fieldName.GetHashCode());
+            result = prime * result + ((function == null) ? 0 : function.GetHashCode());
+            return result;
         }
-        else if (!function.Equals(other.function))
-            return false;
-        return true;
-    }
 
-    public class PayloadNearSpanScorer : SpanScorer
-    {
-        private readonly BytesRef scratch = new BytesRef();
-        protected float payloadScore;
-        private int payloadsSeen;
-        private Spans spans;
-
-        protected PayloadNearSpanScorer(Spans spans, Weight weight,
-                                        Similarity similarity, Similarity.SloppySimScorer docScorer)
-            : base(spans, weight, docScorer)
+        public override bool Equals(Object obj)
         {
-            this.spans = spans;
+            if (this == obj)
+                return true;
+            if (!base.Equals(obj))
+                return false;
+            if (GetType() != obj.GetType())
+                return false;
+            var other = (PayloadNearQuery)obj;
+            if (fieldName == null)
+            {
+                if (other.fieldName != null)
+                    return false;
+            }
+            else if (!fieldName.Equals(other.fieldName))
+                return false;
+            if (function == null)
+            {
+                if (other.function != null)
+                    return false;
+            }
+            else if (!function.Equals(other.function))
+                return false;
+            return true;
         }
 
-        // Get the payloads associated with all underlying subspans
-        public void GetPayloads(Spans[] subSpans)
+        public class PayloadNearSpanScorer : SpanScorer
         {
-            for (int i = 0; i < subSpans.Length; i++)
+            private readonly BytesRef scratch = new BytesRef();
+            protected float payloadScore;
+            internal int payloadsSeen;
+            private SpansBase spans;
+
+            protected PayloadNearSpanScorer(SpansBase spans, Weight weight,
+                                            Similarity similarity, Similarity.SloppySimScorer docScorer)
+                : base(spans, weight, docScorer)
+            {
+                this.spans = spans;
+            }
+
+            // Get the payloads associated with all underlying subspans
+            public void GetPayloads(SpansBase[] subSpans)
             {
-                if (subSpans[i] is NearSpansOrdered)
+                for (int i = 0; i < subSpans.Length; i++)
                 {
-                    if ((subSpans[i]).IsPayloadAvailable())
+                    if (subSpans[i] is NearSpansOrdered)
                     {
-                        ProcessPayloads((subSpans[i]).GetPayload(),
-                                        subSpans[i].Start, subSpans[i].End);
+                        if ((subSpans[i]).IsPayloadAvailable())
+                        {
+                            ProcessPayloads((subSpans[i]).GetPayload(),
+                                            subSpans[i].Start, subSpans[i].End);
+                        }
+                        GetPayloads(((NearSpansOrdered)subSpans[i]).GetSubSpans());
                     }
-                    GetPayloads(((NearSpansOrdered)subSpans[i]).GetSubSpans());
-                }
-                else if (subSpans[i] is NearSpansUnordered)
-                {
-                    if ((subSpans[i]).IsPayloadAvailable())
+                    else if (subSpans[i] is NearSpansUnordered)
                     {
-                        ProcessPayloads((subSpans[i]).GetPayload(),
-                                        subSpans[i].Start, subSpans[i].End);
+                        if ((subSpans[i]).IsPayloadAvailable())
+                        {
+                            ProcessPayloads((subSpans[i]).GetPayload(),
+                                            subSpans[i].Start, subSpans[i].End);
+                        }
+                        GetPayloads(((NearSpansUnordered)subSpans[i]).GetSubSpans());
                     }
-                    GetPayloads(((NearSpansUnordered)subSpans[i]).GetSubSpans());
                 }
             }
-        }
 
-        // TODO change the whole spans api to use bytesRef, or nuke spans
+            // TODO change the whole spans api to use bytesRef, or nuke spans
 
-        protected void ProcessPayloads(ICollection<sbyte[]> payLoads, int start, int end)
-        {
-            foreach (var thePayload in payLoads)
+            protected void ProcessPayloads(ICollection<sbyte[]> payLoads, int start, int end)
             {
-                scratch.bytes = thePayload;
-                scratch.offset = 0;
-                scratch.length = thePayload.Length;
-                payloadScore = function.CurrentScore(doc, fieldName, start, end,
-                                                     payloadsSeen, payloadScore, docScorer.ComputePayloadFactor(doc, spans.Start, spans.End, scratch));
-                ++payloadsSeen;
+                foreach (var thePayload in payLoads)
+                {
+                    scratch.bytes = thePayload;
+                    scratch.offset = 0;
+                    scratch.length = thePayload.Length;
+                    payloadScore = function.CurrentScore(doc, fieldName, start, end,
+                                                         payloadsSeen, payloadScore, docScorer.ComputePayloadFactor(doc, spans.Start, spans.End, scratch));
+                    ++payloadsSeen;
+                }
             }
-        }
 
-        protected override bool SetFreqCurrentDoc()
-        {
-            if (!more)
+            protected override bool SetFreqCurrentDoc()
             {
-                return false;
+                if (!more)
+                {
+                    return false;
+                }
+                doc = spans.Doc;
+                freq = 0.0f;
+                payloadScore = 0;
+                payloadsSeen = 0;
+                do
+                {
+                    int matchLength = spans.End - spans.Start;
+                    freq += docScorer.ComputeSlopFactor(matchLength);
+                    var spansArr = new Spans[1];
+                    spansArr[0] = spans;
+                    GetPayloads(spansArr);
+                    more = spans.Next();
+                } while (more && (doc == spans.Doc));
+                return true;
             }
-            doc = spans.Doc;
-            freq = 0.0f;
-            payloadScore = 0;
-            payloadsSeen = 0;
-            do
-            {
-                int matchLength = spans.End - spans.Start;
-                freq += docScorer.ComputeSlopFactor(matchLength);
-                var spansArr = new Spans[1];
-                spansArr[0] = spans;
-                GetPayloads(spansArr);
-                more = spans.Next();
-            } while (more && (doc == spans.Doc));
-            return true;
-        }
 
-        public float Score()
-        {
-            return base.Score()
-                   * function.DocScore(doc, fieldName, payloadsSeen, payloadScore);
+            public float Score()
+            {
+                return base.Score()
+                       * function.DocScore(doc, fieldName, payloadsSeen, payloadScore);
+            }
         }
-    }
 
-    public class PayloadNearSpanWeight : SpanWeight
-    {
-        public PayloadNearSpanWeight(SpanQuery query, IndexSearcher searcher)
-            : base(query, searcher)
+        public class PayloadNearSpanWeight : SpanWeight
         {
-        }
+            public PayloadNearSpanWeight(SpanQuery query, IndexSearcher searcher)
+                : base(query, searcher)
+            {
+            }
 
-        public override Scorer Scorer(AtomicReaderContext context, bool scoreDocsInOrder,
-                                      bool topScorer, IBits acceptDocs)
-        {
-            return new PayloadNearSpanScorer(query.GetSpans(context, acceptDocs, termContexts), this,
-                                             similarity, similarity.GetSloppySimScorer(stats, context));
-        }
+            public override Scorer Scorer(AtomicReaderContext context, bool scoreDocsInOrder,
+                                          bool topScorer, IBits acceptDocs)
+            {
+                return new PayloadNearSpanScorer(query.GetSpans(context, acceptDocs, termContexts), this,
+                                                 similarity, similarity.GetSloppySimScorer(stats, context));
+            }
 
-        public override Explanation Explain(AtomicReaderContext context, int doc)
-        {
-            var scorer = (PayloadNearSpanScorer)Scorer(context, true, false, context.Reader.LiveDocs);
-            if (scorer != null)
+            public override Explanation Explain(AtomicReaderContext context, int doc)
             {
-                int newDoc = scorer.Advance(doc);
-                if (newDoc == doc)
+                var scorer = (PayloadNearSpanScorer)Scorer(context, true, false, ((AtomicReader)context.Reader).LiveDocs);
+                if (scorer != null)
                 {
-                    float freq = scorer.freq();
-                    Similarity.SloppySimScorer docScorer = similarity.GetSloppySimScorer(stats, context);
-                    var expl = new Explanation();
-                    expl.Description = "weight(" + Query + " in " + doc + ") [" + similarity.GetType().Name +
-                                       "], result of:";
-                    Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq));
-                    expl.AddDetail(scoreExplanation);
-                    expl.Value = scoreExplanation.Value;
-                    String field = ((SpanQuery)Query).Field;
-                    // now the payloads part
-                    Explanation payloadExpl = function.Explain(doc, field, scorer.payloadsSeen, scorer.payloadScore);
-                    // combined
-                    var result = new ComplexExplanation();
-                    result.AddDetail(expl);
-                    result.AddDetail(payloadExpl);
-                    result.Value = expl.Value * payloadExpl.Value;
-                    result.Description = "PayloadNearQuery, product of:";
-                    return result;
+                    int newDoc = scorer.Advance(doc);
+                    if (newDoc == doc)
+                    {
+                        float freq = scorer.Freq;
+                        Similarity.SloppySimScorer docScorer = similarity.GetSloppySimScorer(stats, context);
+                        var expl = new Explanation();
+                        expl.Description = "weight(" + Query + " in " + doc + ") [" + similarity.GetType().Name +
+                                           "], result of:";
+                        Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq));
+                        expl.AddDetail(scoreExplanation);
+                        expl.Value = scoreExplanation.Value;
+                        String field = ((SpanQuery)Query).Field;
+                        // now the payloads part
+                        Explanation payloadExpl = function.Explain(doc, field, scorer.payloadsSeen, scorer.payloadScore);
+                        // combined
+                        var result = new ComplexExplanation();
+                        result.AddDetail(expl);
+                        result.AddDetail(payloadExpl);
+                        result.Value = expl.Value * payloadExpl.Value;
+                        result.Description = "PayloadNearQuery, product of:";
+                        return result;
+                    }
                 }
-            }
 
-            return new ComplexExplanation(false, 0.0f, "no matching term");
+                return new ComplexExplanation(false, 0.0f, "no matching term");
+            }
         }
     }
 }
\ No newline at end of file
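
A caller-side sketch of the two conversions running through the hunk above: AtomicReaderContext.Reader is exposed as the general reader type here, so atomic-only members such as LiveDocs are reached through an explicit AtomicReader cast, and freq() has become the Freq property. Only members visible in these hunks are used; the Weight/Scorer types and their namespaces are assumed from the surrounding code, so treat this as illustrative rather than canonical API.

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    public static class ExplainHelperSketch
    {
        public static bool MatchesAt(Weight weight, AtomicReaderContext context, int doc)
        {
            // Cast down to AtomicReader before touching atomic-only members.
            var liveDocs = ((AtomicReader)context.Reader).LiveDocs;
            var scorer = weight.Scorer(context, true, false, liveDocs);
            if (scorer == null || scorer.Advance(doc) != doc)
                return false;
            // Freq is a property read now; Score() stays a method.
            System.Console.WriteLine("freq=" + scorer.Freq + " score=" + scorer.Score());
            return true;
        }
    }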

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Payloads/PayloadSpanUtil.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Payloads/PayloadSpanUtil.cs b/src/core/Search/Payloads/PayloadSpanUtil.cs
index 097f110..11f5249 100644
--- a/src/core/Search/Payloads/PayloadSpanUtil.cs
+++ b/src/core/Search/Payloads/PayloadSpanUtil.cs
@@ -137,7 +137,7 @@ public class PayloadSpanUtil
     private void GetPayloads(ICollection<sbyte[]> payloads, SpanQuery query)
     {
         var termContexts = new HashMap<Term, TermContext>();
-        var terms = new TreeSet<Term>();
+        var terms = new SortedSet<Term>();
         query.ExtractTerms(terms);
         foreach (var term in terms)
         {
@@ -145,7 +145,7 @@ public class PayloadSpanUtil
         }
         foreach (AtomicReaderContext atomicReaderContext in context.Leaves)
         {
-            Spans spans = query.GetSpans(atomicReaderContext, atomicReaderContext.Reader.LiveDocs, termContexts);
+            SpansBase spans = query.GetSpans(atomicReaderContext, atomicReaderContext.Reader.LiveDocs, termContexts);
             while (spans.Next())
             {
                 if (spans.IsPayloadAvailable())
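
The TreeSet&lt;Term&gt; removed here was a carry-over from the Java source; the replacement is the BCL's System.Collections.Generic.SortedSet&lt;T&gt;, which likewise de-duplicates and keeps its elements in sort order (for Term this assumes the type is IComparable&lt;Term&gt;, as its Java counterpart is). A minimal stand-alone sketch of the same idiom, using strings so it compiles without Lucene.Net:

    using System;
    using System.Collections.Generic;

    static class SortedSetSketch
    {
        static void Main()
        {
            // SortedSet<T> plays the role Java's TreeSet played: an ordered,
            // duplicate-free collection that enumerates in sort order.
            var terms = new SortedSet<string> { "body:lucene", "body:apache", "body:lucene" };
            foreach (var t in terms)
                Console.WriteLine(t);   // prints apache before lucene; the duplicate is dropped
        }
    }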

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/PhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/PhraseQuery.cs b/src/core/Search/PhraseQuery.cs
index 6e9f641..5e1340f 100644
--- a/src/core/Search/PhraseQuery.cs
+++ b/src/core/Search/PhraseQuery.cs
@@ -260,9 +260,12 @@ namespace Lucene.Net.Search
                 get { return parent; }
             }
 
-            public override float GetValueForNormalization()
+            public override float ValueForNormalization
             {
-                return stats.GetValueForNormalization();
+                get
+                {
+                    return stats.ValueForNormalization;
+                }
             }
 
             public override void Normalize(float queryNorm, float topLevelBoost)
@@ -338,7 +341,7 @@ namespace Lucene.Net.Search
 
             public override Explanation Explain(AtomicReaderContext context, int doc)
             {
-                var scorer = Scorer(context, true, false, context.Reader.LiveDocs);
+                var scorer = Scorer(context, true, false, ((AtomicReader)context.Reader).LiveDocs);
                 if (scorer != null)
                 {
                     var newDoc = scorer.Advance(doc);
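
GetValueForNormalization() becoming the ValueForNormalization property is the same conversion applied in the BasicStats, MultiSimilarity, PerFieldSimilarityWrapper and TFIDFSimilarity hunks further down. A freestanding sketch of the pattern, with stand-in types rather than the real SimWeight hierarchy:

    using System;

    // Illustrative stand-ins only, not the Lucene.Net weight/stats classes.
    abstract class WeightSketch
    {
        // Java: public abstract float getValueForNormalization();
        // C# port: a read-only property replaces the Get* method.
        public abstract float ValueForNormalization { get; }
    }

    class TfIdfWeightSketch : WeightSketch
    {
        private readonly float queryWeight;
        public TfIdfWeightSketch(float queryWeight) { this.queryWeight = queryWeight; }

        // Sum of squared weights, mirroring the stats classes in this patch.
        public override float ValueForNormalization
        {
            get { return queryWeight * queryWeight; }
        }
    }

    static class WeightSketchDemo
    {
        static void Main()
        {
            Console.WriteLine(new TfIdfWeightSketch(2f).ValueForNormalization);   // 4
        }
    }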

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/PositiveScoresOnlyCollector.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/PositiveScoresOnlyCollector.cs b/src/core/Search/PositiveScoresOnlyCollector.cs
index ac91d21..a490a08 100644
--- a/src/core/Search/PositiveScoresOnlyCollector.cs
+++ b/src/core/Search/PositiveScoresOnlyCollector.cs
@@ -20,46 +20,46 @@ using Lucene.Net.Index;
 
 namespace Lucene.Net.Search
 {
-	
-	/// <summary> A <see cref="Collector" /> implementation which wraps another
-	/// <see cref="Collector" /> and makes sure only documents with
-	/// scores &gt; 0 are collected.
-	/// </summary>
-	public class PositiveScoresOnlyCollector : Collector
-	{
-		
-		private readonly Collector c;
-		private Scorer scorer;
-		
-		public PositiveScoresOnlyCollector(Collector c)
-		{
-			this.c = c;
-		}
-		
-		public override void  Collect(int doc)
-		{
-			if (scorer.Score() > 0)
-			{
-				c.Collect(doc);
-			}
-		}
-		
-		public override void  SetNextReader(AtomicReaderContext context, int docBase)
-		{
-			c.SetNextReader(context);
-		}
-		
-		public override void  SetScorer(Scorer scorer)
-		{
-			// Set a ScoreCachingWrappingScorer in case the wrapped Collector will call
-			// score() also.
-			this.scorer = new ScoreCachingWrappingScorer(scorer);
-			c.SetScorer(this.scorer);
-		}
 
-	    public override bool AcceptsDocsOutOfOrder
-	    {
-	        get { return c.AcceptsDocsOutOfOrder; }
-	    }
-	}
+    /// <summary> A <see cref="Collector" /> implementation which wraps another
+    /// <see cref="Collector" /> and makes sure only documents with
+    /// scores &gt; 0 are collected.
+    /// </summary>
+    public class PositiveScoresOnlyCollector : Collector
+    {
+
+        private readonly Collector c;
+        private Scorer scorer;
+
+        public PositiveScoresOnlyCollector(Collector c)
+        {
+            this.c = c;
+        }
+
+        public override void Collect(int doc)
+        {
+            if (scorer.Score() > 0)
+            {
+                c.Collect(doc);
+            }
+        }
+
+        public override void SetNextReader(AtomicReaderContext context)
+        {
+            c.SetNextReader(context);
+        }
+
+        public override void SetScorer(Scorer scorer)
+        {
+            // Set a ScoreCachingWrappingScorer in case the wrapped Collector will call
+            // score() also.
+            this.scorer = new ScoreCachingWrappingScorer(scorer);
+            c.SetScorer(this.scorer);
+        }
+
+        public override bool AcceptsDocsOutOfOrder
+        {
+            get { return c.AcceptsDocsOutOfOrder; }
+        }
+    }
 }
\ No newline at end of file
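
The rewritten SetNextReader drops the separate docBase parameter; the AtomicReaderContext argument already identifies the segment. A minimal collector matching the four members overridden above, plus the intended wrapping (a sketch: it assumes these four members are the whole abstract surface and that the usual IndexSearcher.Search(Query, Collector) overload is used):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    // Counts hits; scoring details are ignored, so SetScorer and SetNextReader are no-ops.
    public class CountingCollector : Collector
    {
        public int Count { get; private set; }

        public override void Collect(int doc) { Count++; }
        public override void SetNextReader(AtomicReaderContext context) { }
        public override void SetScorer(Scorer scorer) { }
        public override bool AcceptsDocsOutOfOrder { get { return true; } }
    }

    // Usage: only positively scored documents reach the inner collector.
    //   var counter = new CountingCollector();
    //   searcher.Search(query, new PositiveScoresOnlyCollector(counter));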

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/PrefixQuery.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/PrefixQuery.cs b/src/core/Search/PrefixQuery.cs
index 84bb767..108a2cc 100644
--- a/src/core/Search/PrefixQuery.cs
+++ b/src/core/Search/PrefixQuery.cs
@@ -48,7 +48,7 @@ namespace Lucene.Net.Search
 	        get { return prefix; }
 	    }
 
-        protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
+        protected internal override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
         {
             var tenum = terms.Iterator(null);
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/PrefixTermsEnum.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/PrefixTermsEnum.cs b/src/core/Search/PrefixTermsEnum.cs
index c6d311b..854b992 100644
--- a/src/core/Search/PrefixTermsEnum.cs
+++ b/src/core/Search/PrefixTermsEnum.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Search
             SetInitialSeekTerm(this.prefixRef = prefixText);
         }
 
-        protected override AcceptStatus accept(BytesRef term)
+        protected override AcceptStatus Accept(BytesRef term)
         {
             if (StringHelper.StartsWith(term, prefixRef))
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/ReqExclScorer.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/ReqExclScorer.cs b/src/core/Search/ReqExclScorer.cs
index 2c5fa20..4c8fe5e 100644
--- a/src/core/Search/ReqExclScorer.cs
+++ b/src/core/Search/ReqExclScorer.cs
@@ -120,14 +120,20 @@ namespace Lucene.Net.Search
 			return reqScorer.Score(); // reqScorer may be null when next() or skipTo() already return false
 		}
 		
-        public override int Freq()
+        public override int Freq
         {
-            return reqScorer.Freq();
+            get
+            {
+                return reqScorer.Freq();
+            }
         }
 
-        public override ICollection<ChildScorer> GetChildren()
+        public override ICollection<ChildScorer> Children
         {
-            return new Collection<ChildScorer>(new [] {new ChildScorer(reqScorer, "FILTERED") } );
+            get
+            {
+                return new Collection<ChildScorer>(new[] { new ChildScorer(reqScorer, "FILTERED") });
+            }
         }  
 
 		public override int Advance(int target)
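
Freq and Children switch from Java-style methods to properties here, and the same change appears in ReqOptSumScorer and ScoreCachingWrappingScorer below. A small consumer written against the property forms (only members visible in these hunks are used; namespaces are assumed):

    using System;
    using Lucene.Net.Search;

    static class ScorerInspectionSketch
    {
        // DocID, Freq and Children are all property reads now, where the Java
        // source had docID()/freq()/getChildren().
        public static void Describe(Scorer scorer)
        {
            Console.WriteLine("doc " + scorer.DocID + ", freq " + scorer.Freq
                              + ", " + scorer.Children.Count + " child scorer(s)");
        }
    }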

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/ReqOptSumScorer.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/ReqOptSumScorer.cs b/src/core/Search/ReqOptSumScorer.cs
index 9815bdb..632e90c 100644
--- a/src/core/Search/ReqOptSumScorer.cs
+++ b/src/core/Search/ReqOptSumScorer.cs
@@ -19,75 +19,79 @@ using System.Collections.Generic;
 
 namespace Lucene.Net.Search
 {
-	
-	/// <summary>A Scorer for queries with a required part and an optional part.
-	/// Delays skipTo() on the optional part until a score() is needed.
-	/// <br/>
-	/// This <c>Scorer</c> implements <see cref="DocIdSetIterator.Advance(int)" />.
-	/// </summary>
-	class ReqOptSumScorer:Scorer
-	{
-		/// <summary>The scorers passed from the constructor.
-		/// These are set to null as soon as their next() or skipTo() returns false.
-		/// </summary>
-		private Scorer reqScorer;
-		private Scorer optScorer;
-		
-		/// <summary>Construct a <c>ReqOptScorer</c>.</summary>
-		/// <param name="reqScorer">The required scorer. This must match.
-		/// </param>
-		/// <param name="optScorer">The optional scorer. This is used for scoring only.
-		/// </param>
-		public ReqOptSumScorer(Scorer reqScorer, Scorer optScorer):base(reqScorer.Weight)
-		{ // No similarity used.
-			this.reqScorer = reqScorer;
-			this.optScorer = optScorer;
-		}
-		
-		public override int NextDoc()
-		{
-			return reqScorer.NextDoc();
-		}
-		
-		public override int Advance(int target)
-		{
-			return reqScorer.Advance(target);
-		}
-		
-		public override int DocID
-		{
-		    get { return reqScorer.DocID; }
-		}
-		
-		/// <summary>Returns the score of the current document matching the query.
-		/// Initially invalid, until <see cref="NextDoc()" /> is called the first time.
-		/// </summary>
-		/// <returns> The score of the required scorer, eventually increased by the score
-		/// of the optional scorer when it also matches the current document.
-		/// </returns>
-		public override float Score()
-		{
-			int curDoc = reqScorer.DocID;
-			float reqScore = reqScorer.Score();
-			if (optScorer == null)
-			{
-				return reqScore;
-			}
-			
-			int optScorerDoc = optScorer.DocID;
-			if (optScorerDoc < curDoc && (optScorerDoc = optScorer.Advance(curDoc)) == NO_MORE_DOCS)
-			{
-				optScorer = null;
-				return reqScore;
-			}
-			
-			return optScorerDoc == curDoc?reqScore + optScorer.Score():reqScore;
-		}
 
-        public override int Freq()
+    /// <summary>A Scorer for queries with a required part and an optional part.
+    /// Delays skipTo() on the optional part until a score() is needed.
+    /// <br/>
+    /// This <c>Scorer</c> implements <see cref="DocIdSetIterator.Advance(int)" />.
+    /// </summary>
+    class ReqOptSumScorer : Scorer
+    {
+        /// <summary>The scorers passed from the constructor.
+        /// These are set to null as soon as their next() or skipTo() returns false.
+        /// </summary>
+        private Scorer reqScorer;
+        private Scorer optScorer;
+
+        /// <summary>Construct a <c>ReqOptScorer</c>.</summary>
+        /// <param name="reqScorer">The required scorer. This must match.
+        /// </param>
+        /// <param name="optScorer">The optional scorer. This is used for scoring only.
+        /// </param>
+        public ReqOptSumScorer(Scorer reqScorer, Scorer optScorer)
+            : base(reqScorer.Weight)
+        { // No similarity used.
+            this.reqScorer = reqScorer;
+            this.optScorer = optScorer;
+        }
+
+        public override int NextDoc()
+        {
+            return reqScorer.NextDoc();
+        }
+
+        public override int Advance(int target)
+        {
+            return reqScorer.Advance(target);
+        }
+
+        public override int DocID
+        {
+            get { return reqScorer.DocID; }
+        }
+
+        /// <summary>Returns the score of the current document matching the query.
+        /// Initially invalid, until <see cref="NextDoc()" /> is called the first time.
+        /// </summary>
+        /// <returns> The score of the required scorer, eventually increased by the score
+        /// of the optional scorer when it also matches the current document.
+        /// </returns>
+        public override float Score()
+        {
+            int curDoc = reqScorer.DocID;
+            float reqScore = reqScorer.Score();
+            if (optScorer == null)
+            {
+                return reqScore;
+            }
+
+            int optScorerDoc = optScorer.DocID;
+            if (optScorerDoc < curDoc && (optScorerDoc = optScorer.Advance(curDoc)) == NO_MORE_DOCS)
+            {
+                optScorer = null;
+                return reqScore;
+            }
+
+            return optScorerDoc == curDoc ? reqScore + optScorer.Score() : reqScore;
+        }
+
+        public override int Freq
         {
-            Score();
-            return (optScorer != null && optScorer.DocID == reqScorer.DocID) ? 2 : 1;
+            get
+            {
+                Score();
+                return (optScorer != null && optScorer.DocID == reqScorer.DocID) ? 2 : 1;
+            }
         }
 
         public ICollection<ChildScorer> GetChildren()
@@ -102,5 +106,5 @@ namespace Lucene.Net.Search
         {
             get { return reqScorer.Cost; }
         }
-	}
+    }
 }
\ No newline at end of file
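
In the rewritten Freq getter the leading Score() call is what advances the optional sub-scorer onto the required scorer's document before the 1-or-2 decision is made, so the property can be read at any point after NextDoc() or Advance(). A consumption sketch built only from members shown in this hunk (it assumes the sentinel is still exposed as DocIdSetIterator.NO_MORE_DOCS, which the unqualified NO_MORE_DOCS above suggests):

    using System;
    using Lucene.Net.Search;

    static class ReqOptUsageSketch
    {
        public static void Drain(Scorer scorer)
        {
            while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
            {
                // Freq is 2 when the optional part also matched this doc, else 1;
                // Score() adds the optional score only in the former case.
                Console.WriteLine(scorer.DocID + ": freq=" + scorer.Freq + " score=" + scorer.Score());
            }
        }
    }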

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/ScoreCachingWrappingScorer.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/ScoreCachingWrappingScorer.cs b/src/core/Search/ScoreCachingWrappingScorer.cs
index 6d9bfd6..aecfa92 100644
--- a/src/core/Search/ScoreCachingWrappingScorer.cs
+++ b/src/core/Search/ScoreCachingWrappingScorer.cs
@@ -61,9 +61,12 @@ namespace Lucene.Net.Search
 			return curScore;
 		}
 		
-        public override int Freq()
+        public override int Freq
         {
-            return scorer.Freq();
+            get
+            {
+                return scorer.Freq();
+            }
         }
 
 		public override int DocID
@@ -86,11 +89,14 @@ namespace Lucene.Net.Search
 			return scorer.Advance(target);
 		}
 
-        public override ICollection<ChildScorer> GetChildren()
+        public override ICollection<ChildScorer> Children
         {
-            var list = new List<ChildScorer>(1);
-            list.Add(new ChildScorer(scorer, "CACHED"));
-            return list;
+            get
+            {
+                var list = new List<ChildScorer>(1);
+                list.Add(new ChildScorer(scorer, "CACHED"));
+                return list;
+            }
         }
 
 	    public override long Cost

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/ScoringRewrite.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/ScoringRewrite.cs b/src/core/Search/ScoringRewrite.cs
index 4f7f1a8..ae5c45a 100644
--- a/src/core/Search/ScoringRewrite.cs
+++ b/src/core/Search/ScoringRewrite.cs
@@ -11,9 +11,12 @@ namespace Lucene.Net.Search
 
         private class AnonymounsScoringBooleanQueryRewrite : ScoringRewrite<BooleanQuery>
         {
-            protected override BooleanQuery GetTopLevelQuery()
+            protected override BooleanQuery TopLevelQuery
             {
-                return new BooleanQuery(true);
+                get
+                {
+                    return new BooleanQuery(true);
+                }
             }
 
             protected override void AddClause(BooleanQuery topLevel, Term term, int docCount,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Similarities/BM25Similarity.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Similarities/BM25Similarity.cs b/src/core/Search/Similarities/BM25Similarity.cs
index bb4267b..7cfd013 100644
--- a/src/core/Search/Similarities/BM25Similarity.cs
+++ b/src/core/Search/Similarities/BM25Similarity.cs
@@ -15,8 +15,8 @@ namespace Lucene.Net.Search.Similarities
         {
             for (int i = 0; i < 256; i++)
             {
-                float f = SmallFloat.Byte315ToFloat((sbyte) i);
-                NORM_TABLE[i] = 1.0f/(f*f);
+                float f = SmallFloat.Byte315ToFloat((sbyte)i);
+                NORM_TABLE[i] = 1.0f / (f * f);
             }
         }
 
@@ -50,12 +50,12 @@ namespace Lucene.Net.Search.Similarities
 
         protected virtual float Idf(long docFreq, long numDocs)
         {
-            return (float) Math.Log(1 + (numDocs - docFreq + 0.5D)/(docFreq + 0.5D));
+            return (float)Math.Log(1 + (numDocs - docFreq + 0.5D) / (docFreq + 0.5D));
         }
 
         protected virtual float SloppyFreq(int distance)
         {
-            return 1.0f/(distance + 1);
+            return 1.0f / (distance + 1);
         }
 
         protected virtual float ScorePayload(int doc, int start, int end, BytesRef payload)
@@ -69,12 +69,12 @@ namespace Lucene.Net.Search.Similarities
             if (sumTotalTermFreq <= 0)
                 return 1f;
             else
-                return (float) (sumTotalTermFreq/(double) collectionStats.MaxDoc);
+                return (float)(sumTotalTermFreq / (double)collectionStats.MaxDoc);
         }
 
         protected virtual sbyte EncodeNormValue(float boost, int fieldLength)
         {
-            return SmallFloat.FloatToByte315(boost/(float) Math.Sqrt(fieldLength));
+            return SmallFloat.FloatToByte315(boost / (float)Math.Sqrt(fieldLength));
         }
 
         protected virtual float DecodeNormValue(sbyte b)
@@ -125,15 +125,15 @@ namespace Lucene.Net.Search.Similarities
             var cache = new float[256];
             for (int i = 0; i < cache.Length; i++)
             {
-                cache[i] = k1*((1 - b) + b*DecodeNormValue((sbyte) i)/avgdl);
+                cache[i] = k1 * ((1 - b) + b * DecodeNormValue((sbyte)i) / avgdl);
             }
             return new BM25Stats(collectionStats.Field, idf, queryBoost, avgdl, cache);
         }
 
         public override sealed ExactSimScorer GetExactSimScorer(SimWeight stats, AtomicReaderContext context)
         {
-            var bm25stats = (BM25Stats) stats;
-            NumericDocValues norms = context.Reader.GetNormValues(bm25stats.Field);
+            var bm25stats = (BM25Stats)stats;
+            NumericDocValues norms = ((AtomicReader)context.Reader).GetNormValues(bm25stats.Field);
             return norms == null
                        ? new ExactBM25DocScorerNoNorms(bm25stats, this)
                        : new ExactBM25DocScorer(bm25stats, norms, this) as ExactSimScorer;
@@ -141,38 +141,38 @@ namespace Lucene.Net.Search.Similarities
 
         public override sealed SloppySimScorer GetSloppySimScorer(SimWeight stats, AtomicReaderContext context)
         {
-            var bm25stats = (BM25Stats) stats;
-            return new SloppyBM25DocScorer(bm25stats, context.Reader.GetNormValues(bm25stats.Field), this);
+            var bm25stats = (BM25Stats)stats;
+            return new SloppyBM25DocScorer(bm25stats, ((AtomicReader)context.Reader).GetNormValues(bm25stats.Field), this);
         }
 
         private Explanation ExplainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms)
         {
-            var result = new Explanation {Description = "score(doc=" + doc + ",freq=" + freq + "), product of:"};
+            var result = new Explanation { Description = "score(doc=" + doc + ",freq=" + freq + "), product of:" };
 
-            var boostExpl = new Explanation(stats.QueryBoost*stats.TopLevelBoost, "boost");
+            var boostExpl = new Explanation(stats.QueryBoost * stats.TopLevelBoost, "boost");
             if (boostExpl.Value != 1.0f)
                 result.AddDetail(boostExpl);
 
             result.AddDetail(stats.Idf);
 
-            var tfNormExpl = new Explanation {Description = "tfNorm, computed from:"};
+            var tfNormExpl = new Explanation { Description = "tfNorm, computed from:" };
             tfNormExpl.AddDetail(freq);
             tfNormExpl.AddDetail(new Explanation(k1, "parameter k1"));
             if (norms == null)
             {
                 tfNormExpl.AddDetail(new Explanation(0, "parameter b (norms omitted for field)"));
-                tfNormExpl.Value = (freq.Value*(k1 + 1))/(freq.Value + k1);
+                tfNormExpl.Value = (freq.Value * (k1 + 1)) / (freq.Value + k1);
             }
             else
             {
-                float doclen = DecodeNormValue((sbyte) norms.Get(doc));
+                float doclen = DecodeNormValue((sbyte)norms.Get(doc));
                 tfNormExpl.AddDetail(new Explanation(b, "parameter b"));
                 tfNormExpl.AddDetail(new Explanation(stats.Avgdl, "avgFieldLength"));
                 tfNormExpl.AddDetail(new Explanation(doclen, "fieldLength"));
-                tfNormExpl.Value = (freq.Value*(k1 + 1))/(freq.Value + k1*(1 - b + b*doclen/stats.Avgdl));
+                tfNormExpl.Value = (freq.Value * (k1 + 1)) / (freq.Value + k1 * (1 - b + b * doclen / stats.Avgdl));
             }
             result.AddDetail(tfNormExpl);
-            result.Value = boostExpl.Value*stats.Idf.Value*tfNormExpl.Value;
+            result.Value = boostExpl.Value * stats.Idf.Value * tfNormExpl.Value;
             return result;
         }
 
@@ -248,18 +248,21 @@ namespace Lucene.Net.Search.Similarities
                 get { return cache; }
             }
 
-            public override float GetValueForNormalization()
+            public override float ValueForNormalization
             {
-                // we return a TF-IDF like normalization to be nice, but we don't actually normalize ourselves.
-                float queryWeight = idf.Value*queryBoost;
-                return queryWeight*queryWeight;
+                get
+                {
+                    // we return a TF-IDF like normalization to be nice, but we don't actually normalize ourselves.
+                    float queryWeight = idf.Value * queryBoost;
+                    return queryWeight * queryWeight;
+                }
             }
 
             public override void Normalize(float queryNorm, float topLevelBoost)
             {
                 // we don't normalize with queryNorm at all, we just capture the top-level boost
                 this.topLevelBoost = topLevelBoost;
-                weight = idf.Value*queryBoost*topLevelBoost;
+                weight = idf.Value * queryBoost * topLevelBoost;
             }
         }
 
@@ -275,7 +278,7 @@ namespace Lucene.Net.Search.Similarities
             {
                 //assert norms != null;
                 this.stats = stats;
-                weightValue = stats.Weight*(parent.k1 + 1); // boost * idf * (k1 + 1)
+                weightValue = stats.Weight * (parent.k1 + 1); // boost * idf * (k1 + 1)
                 cache = stats.Cache;
                 this.norms = norms;
                 this.parent = parent;
@@ -283,7 +286,7 @@ namespace Lucene.Net.Search.Similarities
 
             public override float Score(int doc, int freq)
             {
-                return weightValue*freq/(freq + cache[(byte) norms.Get(doc) & 0xFF]);
+                return weightValue * freq / (freq + cache[(byte)norms.Get(doc) & 0xFF]);
             }
 
             public override Explanation Explain(int doc, Explanation freq)
@@ -303,9 +306,9 @@ namespace Lucene.Net.Search.Similarities
             public ExactBM25DocScorerNoNorms(BM25Stats stats, BM25Similarity parent)
             {
                 this.stats = stats;
-                weightValue = stats.Weight*(parent.k1 + 1); // boost * idf * (k1 + 1)
+                weightValue = stats.Weight * (parent.k1 + 1); // boost * idf * (k1 + 1)
                 for (int i = 0; i < SCORE_CACHE_SIZE; i++)
-                    scoreCache[i] = weightValue*i/(i + parent.k1);
+                    scoreCache[i] = weightValue * i / (i + parent.k1);
                 this.parent = parent;
             }
 
@@ -314,7 +317,7 @@ namespace Lucene.Net.Search.Similarities
                 // TODO: maybe score cache is more trouble than its worth?
                 return freq < SCORE_CACHE_SIZE // check cache
                            ? scoreCache[freq] // cache hit
-                           : weightValue*freq/(freq + parent.k1); // cache miss
+                           : weightValue * freq / (freq + parent.k1); // cache miss
             }
 
             public override Explanation Explain(int doc, Explanation freq)
@@ -334,7 +337,7 @@ namespace Lucene.Net.Search.Similarities
             public SloppyBM25DocScorer(BM25Stats stats, NumericDocValues norms, BM25Similarity parent)
             {
                 this.stats = stats;
-                weightValue = stats.Weight*(parent.k1 + 1);
+                weightValue = stats.Weight * (parent.k1 + 1);
                 cache = stats.Cache;
                 this.norms = norms;
                 this.parent = parent;
@@ -343,8 +346,8 @@ namespace Lucene.Net.Search.Similarities
             public override float Score(int doc, float freq)
             {
                 // if there are no norms, we act as if b=0
-                float norm = norms == null ? parent.k1 : cache[(byte) norms.Get(doc) & 0xFF];
-                return weightValue*freq/(freq + norm);
+                float norm = norms == null ? parent.k1 : cache[(byte)norms.Get(doc) & 0xFF];
+                return weightValue * freq / (freq + norm);
             }
 
             public override Explanation Explain(int doc, Explanation freq)
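
The ExplainScore hunk spells out BM25's term-frequency normalization, (freq * (k1 + 1)) / (freq + k1 * (1 - b + b * doclen / avgdl)). A freestanding arithmetic sketch of just that term; k1 = 1.2 and b = 0.75 are only typical parameter choices, not values taken from this diff:

    using System;

    static class Bm25TfNormSketch
    {
        // Mirrors the tfNormExpl.Value expression above: doclen/avgdl is the
        // length normalization, k1 controls term-frequency saturation.
        static float TfNorm(float freq, float doclen, float avgdl, float k1 = 1.2f, float b = 0.75f)
        {
            return (freq * (k1 + 1)) / (freq + k1 * (1 - b + b * doclen / avgdl));
        }

        static void Main()
        {
            Console.WriteLine(TfNorm(3f, 10f, 10f));   // average-length doc: 6.6 / 4.2, about 1.571
            Console.WriteLine(TfNorm(3f, 20f, 10f));   // twice the average length scores lower, about 1.294
        }
    }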

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Similarities/BasicStats.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Similarities/BasicStats.cs b/src/core/Search/Similarities/BasicStats.cs
index 4739c7a..9b2e6bc 100644
--- a/src/core/Search/Similarities/BasicStats.cs
+++ b/src/core/Search/Similarities/BasicStats.cs
@@ -37,9 +37,12 @@ namespace Lucene.Net.Search.Similarities
             get { return queryBoost; }
         }
 
-        public override float GetValueForNormalization()
+        public override float ValueForNormalization
         {
-            return RawNormalizationValue*RawNormalizationValue;
+            get
+            {
+                return RawNormalizationValue * RawNormalizationValue;
+            }
         }
 
         public override void Normalize(float queryNorm, float topLevelBoost)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Similarities/MultiSimilarity.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Similarities/MultiSimilarity.cs b/src/core/Search/Similarities/MultiSimilarity.cs
index 4e7ef5a..9ec6c63 100644
--- a/src/core/Search/Similarities/MultiSimilarity.cs
+++ b/src/core/Search/Similarities/MultiSimilarity.cs
@@ -118,10 +118,13 @@ namespace Lucene.Net.Search.Similarities
                 this.subStats = subStats;
             }
 
-            public override float GetValueForNormalization()
+            public override float ValueForNormalization
             {
-                float sum = subStats.Sum(stat => stat.GetValueForNormalization());
-                return sum/subStats.Length;
+                get
+                {
+                    float sum = subStats.Sum(stat => stat.ValueForNormalization);
+                    return sum / subStats.Length;
+                }
             }
 
             public override void Normalize(float queryNorm, float topLevelBoost)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Similarities/PerFieldSimilarityWrapper.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Similarities/PerFieldSimilarityWrapper.cs b/src/core/Search/Similarities/PerFieldSimilarityWrapper.cs
index 8caa8f9..35f85b5 100644
--- a/src/core/Search/Similarities/PerFieldSimilarityWrapper.cs
+++ b/src/core/Search/Similarities/PerFieldSimilarityWrapper.cs
@@ -37,9 +37,12 @@ namespace Lucene.Net.Search.Similarities
             internal Similarity Delegate;
             internal SimWeight DelegateWeight;
 
-            public override float GetValueForNormalization()
+            public override float ValueForNormalization
             {
-                return DelegateWeight.GetValueForNormalization();
+                get
+                {
+                    return DelegateWeight.ValueForNormalization;
+                }
             }
 
             public override void Normalize(float queryNorm, float topLevelBoost)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Search/Similarities/TFIDFSimilarity.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/Similarities/TFIDFSimilarity.cs b/src/core/Search/Similarities/TFIDFSimilarity.cs
index 275c25f..232742a 100644
--- a/src/core/Search/Similarities/TFIDFSimilarity.cs
+++ b/src/core/Search/Similarities/TFIDFSimilarity.cs
@@ -227,10 +227,13 @@ namespace Lucene.Net.Search.Similarities
                 get { return value; }
             }
 
-            public override float GetValueForNormalization()
+            public override float ValueForNormalization
             {
-                // TODO: (sorta LUCENE-1907) make non-static class and expose this squaring via a nice method to subclasses?
-                return queryWeight*queryWeight; // sum of squared weights
+                get
+                {
+                    // TODO: (sorta LUCENE-1907) make non-static class and expose this squaring via a nice method to subclasses?
+                    return queryWeight * queryWeight; // sum of squared weights
+                }
             }
 
             public override void Normalize(float queryNorm, float topLevelBoost)