You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/05 16:51:36 UTC

[01/27] lucenenet git commit: Lucene.Net.Analysis.Miscellaneous.KeywordMarkerFilter refactor: IsKeyword -> IsKeyword() (converts the property to a method)

Repository: lucenenet
Updated Branches:
  refs/heads/api-work ab81d9131 -> f71dfb400


Lucene.Net.Analysis.Miscellaneous.KeywordMarkerFilter refactor: IsKeyword -> IsKeyword() (converts the property to a method)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/2ae5a27e
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/2ae5a27e
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/2ae5a27e

Branch: refs/heads/api-work
Commit: 2ae5a27e4b087fd1f6a4e71c76cab2589e157c33
Parents: ab81d91
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 14:45:23 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:45 2017 +0700

----------------------------------------------------------------------
 .../Analysis/Miscellaneous/KeywordMarkerFilter.cs           | 4 ++--
 .../Analysis/Miscellaneous/PatternKeywordMarkerFilter.cs    | 9 +++------
 .../Analysis/Miscellaneous/SetKeywordMarkerFilter.cs        | 7 ++-----
 3 files changed, 7 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2ae5a27e/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilter.cs
index 83adbda..b5a8117 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilter.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             if (m_input.IncrementToken())
             {
-                if (IsKeyword)
+                if (IsKeyword())
                 {
                     keywordAttr.IsKeyword = true;
                 }
@@ -52,6 +52,6 @@ namespace Lucene.Net.Analysis.Miscellaneous
             }
         }
 
-        protected abstract bool IsKeyword { get; } // LUCENENET TODO: Change to IsKeyword() ?
+        protected abstract bool IsKeyword();
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2ae5a27e/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternKeywordMarkerFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternKeywordMarkerFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternKeywordMarkerFilter.cs
index 10ea4a4..a9166dd 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternKeywordMarkerFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PatternKeywordMarkerFilter.cs
@@ -50,13 +50,10 @@ namespace Lucene.Net.Analysis.Miscellaneous
             this.pattern = pattern;
         }
 
-        protected override bool IsKeyword
+        protected override bool IsKeyword()
         {
-            get
-            {
-                matcher = pattern.Match(termAtt.ToString()); 
-                return matcher.Success;
-            }
+            matcher = pattern.Match(termAtt.ToString()); 
+            return matcher.Success;
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2ae5a27e/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SetKeywordMarkerFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SetKeywordMarkerFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SetKeywordMarkerFilter.cs
index 769de5e..ee13614 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SetKeywordMarkerFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SetKeywordMarkerFilter.cs
@@ -46,12 +46,9 @@ namespace Lucene.Net.Analysis.Miscellaneous
             termAtt = AddAttribute<ICharTermAttribute>();
         }
 
-        protected override bool IsKeyword
+        protected override bool IsKeyword()
         {
-            get
-            {
-                return keywordSet.Contains(termAtt.Buffer, 0, termAtt.Length);
-            }
+            return keywordSet.Contains(termAtt.Buffer, 0, termAtt.Length);
         }
     }
 }
\ No newline at end of file


[11/27] lucenenet git commit: Lucene.Net.Sandbox: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Sandbox: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/a538f19b
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/a538f19b
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/a538f19b

Branch: refs/heads/api-work
Commit: a538f19bfa8364a1a26394ab4d5a1dd9daa94b85
Parents: e32cb9e
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 13:20:02 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:53 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Sandbox/Queries/SlowFuzzyTermsEnum.cs | 2 +-
 src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a538f19b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyTermsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Sandbox/Queries/SlowFuzzyTermsEnum.cs b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyTermsEnum.cs
index 629de32..05121db 100644
--- a/src/Lucene.Net.Sandbox/Queries/SlowFuzzyTermsEnum.cs
+++ b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyTermsEnum.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Sandbox.Queries
             /// After calling the constructor the enumeration is already pointing to the first 
             /// valid term if such a term exists.
             /// </summary>
-            /// <exception cref="IOException">If there is a low-level I/O error.</exception>
+            /// <exception cref="System.IO.IOException">If there is a low-level I/O error.</exception>
             public LinearFuzzyTermsEnum(SlowFuzzyTermsEnum outerInstance)
                 : base(outerInstance.m_terms.GetIterator(null))
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a538f19b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
index 9ef71ef..78a27db 100644
--- a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
+++ b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Sandbox.Queries
     /// (see below) to ensure that all selections happen in constant-time for performance.
     /// <para/>
     /// Like sorting by string, this also supports sorting missing values as first or last,
-    /// via <see cref="SetMissingValue(object)"/>.
+    /// via <see cref="MissingValue"/>.
     /// <para/>
     /// Limitations:
     /// <list type="bullet">


[23/27] lucenenet git commit: Lucene.Net.Core.Codecs.Lucene45.Lucene45DocValuesConsumer: changed from .ToArray() to .CopyTo() for better efficiency

Posted by ni...@apache.org.
Lucene.Net.Core.Codecs.Lucene45.Lucene45DocValuesConsumer: changed from .ToArray() to .CopyTo() for better efficiency


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/0c711c82
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/0c711c82
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/0c711c82

Branch: refs/heads/api-work
Commit: 0c711c82dbb07f6361226d9166c3d10bca3d6ed2
Parents: 3ec3e79
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 18:35:06 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 18:35:06 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Codecs/Lucene45/Lucene45DocValuesConsumer.cs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0c711c82/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45DocValuesConsumer.cs b/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
index 9c151ba..5ae2379 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
@@ -236,7 +236,9 @@ namespace Lucene.Net.Codecs.Lucene45
                     break;
 
                 case TABLE_COMPRESSED:
-                    long[] decode = uniqueValues.ToArray();//LUCENE TO-DO Hadd oparamerter before
+                    // LUCENENET NOTE: diming an array and then using .CopyTo() for better efficiency than LINQ .ToArray()
+                    long[] decode = new long[uniqueValues.Count];
+                    uniqueValues.CopyTo(decode, 0);
                     Dictionary<long, int> encode = new Dictionary<long, int>();
                     meta.WriteVInt(decode.Length);
                     for (int i = 0; i < decode.Length; i++)


[22/27] lucenenet git commit: Lucene.Net.Core: reviewed and removed some unnecessary TODOs

Posted by ni...@apache.org.
Lucene.Net.Core: reviewed and removed some unnecessary TODOs


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/3ec3e794
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/3ec3e794
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/3ec3e794

Branch: refs/heads/api-work
Commit: 3ec3e794e6d4622c4bdeb2fa3c71a011fedcc170
Parents: efd894b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 18:33:58 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 18:33:58 2017 +0700

----------------------------------------------------------------------
 .../Codecs/BlockTreeTermsReader.cs              | 146 ++++++++-----------
 .../Index/AtomicReaderContext.cs                |   2 +-
 src/Lucene.Net.Core/Index/IndexWriter.cs        |  12 +-
 src/Lucene.Net.Core/Index/Term.cs               |  13 +-
 src/Lucene.Net.Core/Search/PhraseQuery.cs       |   1 -
 src/Lucene.Net.Core/Search/ReqExclScorer.cs     |   2 -
 .../Search/ScoreCachingWrappingScorer.cs        |   2 -
 .../Search/Spans/NearSpansOrdered.cs            |   1 -
 src/Lucene.Net.Core/Search/Spans/TermSpans.cs   |   2 -
 src/Lucene.Net.Core/Util/CollectionUtil.cs      |  21 +--
 src/Lucene.Net.Core/Util/WeakIdentityMap.cs     |   2 -
 11 files changed, 83 insertions(+), 121 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
index 0a9fc2d..be4641d 100644
--- a/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
@@ -492,16 +492,6 @@ namespace Lucene.Net.Codecs
             {
                 StringBuilder @out = new StringBuilder();
 
-                /* LUCENE TO-DO I don't think this is neccesary
-                try
-                {
-                  @out = new PrintStream(bos, false, IOUtils.UTF_8);
-                }
-                catch (UnsupportedEncodingException bogus)
-                {
-                  throw new Exception(bogus);
-                }*/
-
                 @out.AppendLine("  index FST:");
                 @out.AppendLine("    " + IndexNodeCount + " nodes");
                 @out.AppendLine("    " + IndexArcCount + " arcs");
@@ -536,15 +526,6 @@ namespace Lucene.Net.Codecs
                     Debug.Assert(TotalBlockCount == total);
                 }
                 return @out.ToString();
-                /* LUCENE TO-DO I dont think this is neccesary
-                try
-                {
-                  return bos.ToString(IOUtils.UTF_8);
-                }
-                catch (UnsupportedEncodingException bogus)
-                {
-                  throw new Exception(bogus);
-                }*/
             }
         }
 
@@ -2416,69 +2397,70 @@ namespace Lucene.Net.Codecs
                     }
                 }
 
-                /*LUCENE TO-DO Not in use
-                private void PrintSeekState(PrintStream @out)
-                {
-                  if (CurrentFrame == StaticFrame)
-                  {
-                    @out.println("  no prior seek");
-                  }
-                  else
-                  {
-                    @out.println("  prior seek state:");
-                    int ord = 0;
-                    bool isSeekFrame = true;
-                    while (true)
-                    {
-                      Frame f = GetFrame(ord);
-                      Debug.Assert(f != null);
-                      BytesRef prefix = new BytesRef(Term_Renamed.Bytes, 0, f.Prefix);
-                      if (f.NextEnt == -1)
-                      {
-                        @out.println("    frame " + (isSeekFrame ? "(seek)" : "(next)") + " ord=" + ord + " fp=" + f.Fp + (f.IsFloor ? (" (fpOrig=" + f.FpOrig + ")") : "") + " prefixLen=" + f.Prefix + " prefix=" + prefix + (f.NextEnt == -1 ? "" : (" (of " + f.EntCount + ")")) + " hasTerms=" + f.HasTerms + " isFloor=" + f.IsFloor + " code=" + ((f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) + (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0)) + " isLastInFloor=" + f.IsLastInFloor + " mdUpto=" + f.MetaDataUpto + " tbOrd=" + f.TermBlockOrd);
-                      }
-                      else
-                      {
-                        @out.println("    frame " + (isSeekFrame ? "(seek, loaded)" : "(next, loaded)") + " ord=" + ord + " fp=" + f.Fp + (f.IsFloor ? (" (fpOrig=" + f.FpOrig + ")") : "") + " prefixLen=" + f.Prefix + " prefix=" + prefix + " nextEnt=" + f.NextEnt + (f.NextEnt == -1 ? "" : (" (of " + f.EntCount + ")")) + " hasTerms=" + f.HasTerms + " isFloor=" + f.IsFloor + " code=" + ((f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) + (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0)) + " lastSubFP=" + f.LastSubFP + " isLastInFloor=" + f.IsLastInFloor + " mdUpto=" + f.MetaDataUpto + " tbOrd=" + f.TermBlockOrd);
-                      }
-                      if (OuterInstance.Index != null)
-                      {
-                        Debug.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc);
-                        if (f.Prefix > 0 && isSeekFrame && f.Arc.Label != (Term_Renamed.Bytes[f.Prefix - 1] & 0xFF))
-                        {
-                          @out.println("      broken seek state: arc.label=" + (char) f.Arc.Label + " vs term byte=" + (char)(Term_Renamed.Bytes[f.Prefix - 1] & 0xFF));
-                          throw new Exception("seek state is broken");
-                        }
-                        BytesRef output = Util.Get(OuterInstance.Index, prefix);
-                        if (output == null)
-                        {
-                          @out.println("      broken seek state: prefix is not final in index");
-                          throw new Exception("seek state is broken");
-                        }
-                        else if (isSeekFrame && !f.IsFloor)
-                        {
-                          ByteArrayDataInput reader = new ByteArrayDataInput(output.Bytes, output.Offset, output.Length);
-                          long codeOrig = reader.ReadVLong();
-                          long code = (f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) | (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) | (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0);
-                          if (codeOrig != code)
-                          {
-                            @out.println("      broken seek state: output code=" + codeOrig + " doesn't match frame code=" + code);
-                            throw new Exception("seek state is broken");
-                          }
-                        }
-                      }
-                      if (f == CurrentFrame)
-                      {
-                        break;
-                      }
-                      if (f.Prefix == ValidIndexPrefix)
-                      {
-                        isSeekFrame = false;
-                      }
-                      ord++;
-                    }
-                  }
-                }*/
+                // LUCENENET NOTE: Not in use
+
+                //private void PrintSeekState(PrintStream @out)
+                //{
+                //    if (CurrentFrame == StaticFrame)
+                //    {
+                //        @out.println("  no prior seek");
+                //    }
+                //    else
+                //    {
+                //        @out.println("  prior seek state:");
+                //        int ord = 0;
+                //        bool isSeekFrame = true;
+                //        while (true)
+                //        {
+                //            Frame f = GetFrame(ord);
+                //            Debug.Assert(f != null);
+                //            BytesRef prefix = new BytesRef(Term_Renamed.Bytes, 0, f.Prefix);
+                //            if (f.NextEnt == -1)
+                //            {
+                //                @out.println("    frame " + (isSeekFrame ? "(seek)" : "(next)") + " ord=" + ord + " fp=" + f.Fp + (f.IsFloor ? (" (fpOrig=" + f.FpOrig + ")") : "") + " prefixLen=" + f.Prefix + " prefix=" + prefix + (f.NextEnt == -1 ? "" : (" (of " + f.EntCount + ")")) + " hasTerms=" + f.HasTerms + " isFloor=" + f.IsFloor + " code=" + ((f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS : 0) + (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR : 0)) + " isLastInFloor=" + f.IsLastInFloor + " mdUpto=" + f.MetaDataUpto + " tbOrd=" + f.TermBlockOrd);
+                //            }
+                //            else
+                //            {
+                //                @out.println("    frame " + (isSeekFrame ? "(seek, loaded)" : "(next, loaded)") + " ord=" + ord + " fp=" + f.Fp + (f.IsFloor ? (" (fpOrig=" + f.FpOrig + ")") : "") + " prefixLen=" + f.Prefix + " prefix=" + prefix + " nextEnt=" + f.NextEnt + (f.NextEnt == -1 ? "" : (" (of " + f.EntCount + ")")) + " hasTerms=" + f.HasTerms + " isFloor=" + f.IsFloor + " code=" + ((f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS : 0) + (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR : 0)) + " lastSubFP=" + f.LastSubFP + " isLastInFloor=" + f.IsLastInFloor + " mdUpto=" + f.MetaDataUpto + " tbOrd=" + f.TermBlockOrd);
+                //            }
+                //            if (OuterInstance.Index != null)
+                //            {
+                //                Debug.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc);
+                //                if (f.Prefix > 0 && isSeekFrame && f.Arc.Label != (Term_Renamed.Bytes[f.Prefix - 1] & 0xFF))
+                //                {
+                //                    @out.println("      broken seek state: arc.label=" + (char)f.Arc.Label + " vs term byte=" + (char)(Term_Renamed.Bytes[f.Prefix - 1] & 0xFF));
+                //                    throw new Exception("seek state is broken");
+                //                }
+                //                BytesRef output = Util.Get(OuterInstance.Index, prefix);
+                //                if (output == null)
+                //                {
+                //                    @out.println("      broken seek state: prefix is not final in index");
+                //                    throw new Exception("seek state is broken");
+                //                }
+                //                else if (isSeekFrame && !f.IsFloor)
+                //                {
+                //                    ByteArrayDataInput reader = new ByteArrayDataInput(output.Bytes, output.Offset, output.Length);
+                //                    long codeOrig = reader.ReadVLong();
+                //                    long code = (f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) | (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS : 0) | (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR : 0);
+                //                    if (codeOrig != code)
+                //                    {
+                //                        @out.println("      broken seek state: output code=" + codeOrig + " doesn't match frame code=" + code);
+                //                        throw new Exception("seek state is broken");
+                //                    }
+                //                }
+                //            }
+                //            if (f == CurrentFrame)
+                //            {
+                //                break;
+                //            }
+                //            if (f.Prefix == ValidIndexPrefix)
+                //            {
+                //                isSeekFrame = false;
+                //            }
+                //            ord++;
+                //        }
+                //    }
+                //}
 
                 /* Decodes only the term bytes of the next term.  If caller then asks for
                    metadata, ie docFreq, totalTermFreq or pulls a D/&PEnum, we then (lazily)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Index/AtomicReaderContext.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/AtomicReaderContext.cs b/src/Lucene.Net.Core/Index/AtomicReaderContext.cs
index edbf05e..8f7d2eb 100644
--- a/src/Lucene.Net.Core/Index/AtomicReaderContext.cs
+++ b/src/Lucene.Net.Core/Index/AtomicReaderContext.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Index
             this.Ord = leafOrd;
             this.DocBase = leafDocBase;
             this.reader = reader;
-            this.leaves = IsTopLevel ? new[] { this } : null; //LUCENE TO-DO suspicous
+            this.leaves = IsTopLevel ? new[] { this } : null;
         }
 
         internal AtomicReaderContext(AtomicReader atomicReader)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Index/IndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/IndexWriter.cs b/src/Lucene.Net.Core/Index/IndexWriter.cs
index 0897287..99a9381 100644
--- a/src/Lucene.Net.Core/Index/IndexWriter.cs
+++ b/src/Lucene.Net.Core/Index/IndexWriter.cs
@@ -3820,12 +3820,12 @@ namespace Lucene.Net.Index
         // at a time:
         private readonly object fullFlushLock = new object();
 
-        //LUCENE TO-DO Not possible in .NET
-        /*// for assert
-        internal virtual bool HoldsFullFlushLock()
-        {
-          return Thread.holdsLock(FullFlushLock);
-        }*/
+        // LUCENENET NOTE: Not possible in .NET
+        //// for assert
+        //internal virtual bool HoldsFullFlushLock()
+        //{
+        //  return Thread.holdsLock(FullFlushLock);
+        //}
 
         /// <summary>
         /// Flush all in-memory buffered updates (adds and deletes)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Index/Term.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/Term.cs b/src/Lucene.Net.Core/Index/Term.cs
index ebc6522..26e5b35 100644
--- a/src/Lucene.Net.Core/Index/Term.cs
+++ b/src/Lucene.Net.Core/Index/Term.cs
@@ -91,20 +91,9 @@ namespace Lucene.Net.Index
         /// </summary>
         public static string ToString(BytesRef termText)
         {
-            // LUCENENET TODO
-            /*// the term might not be text, but usually is. so we make a best effort
-            CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onMalformedInput(CodingErrorAction.REPORT).onUnmappableCharacter(CodingErrorAction.REPORT);
             try
             {
-              return decoder.decode(ByteBuffer.wrap(termText.Bytes, termText.Offset, termText.Length)).ToString();
-            }
-            catch (CharacterCodingException e)
-            {
-              return termText.ToString();
-            }*/
-            try
-            {
-                // .Net port: termText already has this handy UTF8ToString method, so we're using that instead
+                // LUCENENET specific: termText already has this handy UTF8ToString method, so we're using that instead of Encoding.UTF8.GetBytes()
                 return termText.Utf8ToString();
             }
             catch

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Search/PhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/PhraseQuery.cs b/src/Lucene.Net.Core/Search/PhraseQuery.cs
index 64140f1..10d660b 100644
--- a/src/Lucene.Net.Core/Search/PhraseQuery.cs
+++ b/src/Lucene.Net.Core/Search/PhraseQuery.cs
@@ -431,7 +431,6 @@ namespace Lucene.Net.Search
         /// <seealso cref= Lucene.Net.Search.Query#extractTerms(Set) </seealso>
         public override void ExtractTerms(ISet<Term> queryTerms)
         {
-            //LUCENE TO-DO Normal conundrum
             queryTerms.UnionWith(terms);
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Search/ReqExclScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/ReqExclScorer.cs b/src/Lucene.Net.Core/Search/ReqExclScorer.cs
index b4f4ab3..8af838d 100644
--- a/src/Lucene.Net.Core/Search/ReqExclScorer.cs
+++ b/src/Lucene.Net.Core/Search/ReqExclScorer.cs
@@ -122,9 +122,7 @@ namespace Lucene.Net.Search
 
         public override ICollection<ChildScorer> GetChildren()
         {
-            //LUCENE TO-DO
             return new[] { new ChildScorer(reqScorer, "FILTERED") };
-            //return Collections.singleton(new ChildScorer(ReqScorer, "FILTERED"));
         }
 
         public override int Advance(int target)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Search/ScoreCachingWrappingScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/ScoreCachingWrappingScorer.cs b/src/Lucene.Net.Core/Search/ScoreCachingWrappingScorer.cs
index c7216d0..a6c4a47 100644
--- a/src/Lucene.Net.Core/Search/ScoreCachingWrappingScorer.cs
+++ b/src/Lucene.Net.Core/Search/ScoreCachingWrappingScorer.cs
@@ -78,9 +78,7 @@ namespace Lucene.Net.Search
 
         public override ICollection<ChildScorer> GetChildren()
         {
-            //LUCENE TO-DO
             return new[] { new ChildScorer(scorer, "CACHED") };
-            //return Collections.singleton(new ChildScorer(Scorer, "CACHED"));
         }
 
         public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Search/Spans/NearSpansOrdered.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net.Core/Search/Spans/NearSpansOrdered.cs
index 3906b3d..a9232a9 100644
--- a/src/Lucene.Net.Core/Search/Spans/NearSpansOrdered.cs
+++ b/src/Lucene.Net.Core/Search/Spans/NearSpansOrdered.cs
@@ -343,7 +343,6 @@ namespace Lucene.Net.Search.Spans
             var possibleMatchPayloads = new HashSet<byte[]>();
             if (subSpans[subSpans.Length - 1].IsPayloadAvailable)
             {
-                //LUCENE TO-DO UnionWith or AddAll(Set<>, IEnumerable<>)
                 possibleMatchPayloads.UnionWith(subSpans[subSpans.Length - 1].GetPayload());
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/Spans/TermSpans.cs b/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
index f72f7bb..6fc28c9 100644
--- a/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
+++ b/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
@@ -126,9 +126,7 @@ namespace Lucene.Net.Search.Spans
             {
                 bytes = null;
             }
-            //LUCENE TO-DO
             return new[] { bytes };
-            //return Collections.singletonList(bytes);
         }
 
         // TODO: Remove warning after API has been finalized

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Util/CollectionUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/CollectionUtil.cs b/src/Lucene.Net.Core/Util/CollectionUtil.cs
index e1142f9..7f8cf95 100644
--- a/src/Lucene.Net.Core/Util/CollectionUtil.cs
+++ b/src/Lucene.Net.Core/Util/CollectionUtil.cs
@@ -44,11 +44,12 @@ namespace Lucene.Net.Util
             internal ListIntroSorter(IList<T> list, IComparer<T> comp)
                 : base()
             {
-                /* LUCENE TO-DO I believe all ILists are RA
-                if (!(list is RandomAccess))
-                {
-                  throw new System.ArgumentException("CollectionUtil can only sort random access lists in-place.");
-                }*/
+                // LUCENENET NOTE: All ILists in .NET are random access (only IEnumerable is forward-only)
+                //if (!(list is RandomAccess))
+                //{
+                //  throw new System.ArgumentException("CollectionUtil can only sort random access lists in-place.");
+                //}
+                
                 this.list = list;
                 this.comp = comp;
             }
@@ -83,11 +84,11 @@ namespace Lucene.Net.Util
             internal ListTimSorter(IList<T> list, IComparer<T> comp, int maxTempSlots)
                 : base(maxTempSlots)
             {
-                /* LUCENE TO-DO I believe all ILists are RA
-                if (!(list is RandomAccess))
-                {
-                  throw new System.ArgumentException("CollectionUtil can only sort random access lists in-place.");
-                }*/
+                // LUCENENET NOTE: All ILists in .NET are random access (only IEnumerable is forward-only)
+                //if (!(list is RandomAccess))
+                //{
+                //  throw new System.ArgumentException("CollectionUtil can only sort random access lists in-place.");
+                //}
                 this.list = list;
                 this.comp = comp;
                 if (maxTempSlots > 0)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3ec3e794/src/Lucene.Net.Core/Util/WeakIdentityMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/WeakIdentityMap.cs b/src/Lucene.Net.Core/Util/WeakIdentityMap.cs
index 9cb1bef..c6c35c4 100644
--- a/src/Lucene.Net.Core/Util/WeakIdentityMap.cs
+++ b/src/Lucene.Net.Core/Util/WeakIdentityMap.cs
@@ -63,8 +63,6 @@ namespace Lucene.Net.Util
     public sealed class WeakIdentityMap<TKey, TValue>
         where TKey : class
     {
-        // LUCENENET TODO: Make this class internal as it isn't required anywhere; need to have it exposed to tests though
-
         //private readonly ReferenceQueue<object> queue = new ReferenceQueue<object>();
         private readonly IDictionary<IdentityWeakReference, TValue> backingStore;
 


[18/27] lucenenet git commit: Lucene.Net.Core.Search.Spans.SpanOrQuery: fixed ToString() so it doesn't need to remove chars from the StringBuilder

Posted by ni...@apache.org.
Lucene.Net.Core.Search.Spans.SpanOrQuery: fixed ToString() so it doesn't need to remove chars from the StringBuilder


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/6c2b8de7
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/6c2b8de7
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/6c2b8de7

Branch: refs/heads/api-work
Commit: 6c2b8de73fa1f4166da1f47f9c43518964d55f88
Parents: bab4add
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 17:46:21 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 17:46:21 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Search/Spans/SpanOrQuery.cs | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6c2b8de7/src/Lucene.Net.Core/Search/Spans/SpanOrQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/Spans/SpanOrQuery.cs b/src/Lucene.Net.Core/Search/Spans/SpanOrQuery.cs
index 08a2ab0..2d1758a 100644
--- a/src/Lucene.Net.Core/Search/Spans/SpanOrQuery.cs
+++ b/src/Lucene.Net.Core/Search/Spans/SpanOrQuery.cs
@@ -131,15 +131,14 @@ namespace Lucene.Net.Search.Spans
             StringBuilder buffer = new StringBuilder();
             buffer.Append("spanOr([");
             IEnumerator<SpanQuery> i = clauses.GetEnumerator();
+            bool first = true;
             while (i.MoveNext())
             {
                 SpanQuery clause = i.Current;
+                if (!first) buffer.Append(", ");
                 buffer.Append(clause.ToString(field));
-                buffer.Append(", ");
+                first = false;
             }
-            //LUCENE TO-DO
-            if (clauses.Count > 0)
-                buffer.Remove(buffer.Length - 2, 2);
             buffer.Append("])");
             buffer.Append(ToStringUtils.Boost(Boost));
             return buffer.ToString();


[20/27] lucenenet git commit: Lucene.Net.Core.Util.FieldCacheSanityChecker refactor: using CopyTo instead of ToArray() to move data into pre-initialized array more efficiently

Posted by ni...@apache.org.
Lucene.Net.Core.Util.FieldCacheSanityChecker refactor: using CopyTo instead of ToArray() to move data into pre-initialized array more efficiently


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/460e55e7
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/460e55e7
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/460e55e7

Branch: refs/heads/api-work
Commit: 460e55e77e2470d92d7f21691540e5bbfd9b8b3d
Parents: 6d6df6c
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 18:09:22 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 18:09:22 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/460e55e7/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs b/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
index 57f1dcf..8a0fc7f 100644
--- a/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
+++ b/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
@@ -190,7 +190,7 @@ namespace Lucene.Net.Util
                     }
 
                     FieldCache.CacheEntry[] badness = new FieldCache.CacheEntry[badEntries.Count];
-                    badness = badEntries.ToArray(); //LUCENE TO-DO had param of badness before
+                    badEntries.CopyTo(badness, 0);
 
                     insanity.Add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.ToString(), badness));
                 }


[02/27] lucenenet git commit: Lucene.Net.Expressions: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Expressions: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/9fb8cb15
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/9fb8cb15
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/9fb8cb15

Branch: refs/heads/api-work
Commit: 9fb8cb15373665625c682ad2a5214f627283ce1f
Parents: 2e2feda
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 11:24:44 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:46 2017 +0700

----------------------------------------------------------------------
 .../JS/JavascriptCompiler.cs                    |   1 +
 .../JS/JavascriptLexer.cs                       | 242 +++++++++----------
 .../JS/JavascriptParser.cs                      |   2 +-
 3 files changed, 123 insertions(+), 122 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9fb8cb15/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs b/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
index d037d2d..3495648 100644
--- a/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
+++ b/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs
@@ -164,6 +164,7 @@ namespace Lucene.Net.Expressions.JS
 
         /// <summary>Constructs a compiler for expressions with specific set of functions</summary>
         /// <param name="sourceText">The expression to compile</param>
+        /// <param name="functions">The set of functions to compile with</param>
         private JavascriptCompiler(string sourceText, IDictionary<string, MethodInfo> functions)
         {
             if (sourceText == null)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9fb8cb15/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs b/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs
index 416f4da..a396464 100644
--- a/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs
+++ b/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs
@@ -147,11 +147,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_ADD"
-		// $ANTLR start "AT_BIT_AND"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BIT_AND()
+        // do for sure before leaving
+        // $ANTLR end "AT_ADD"
+        // $ANTLR start "AT_BIT_AND"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BIT_AND()
 		{
 		    int _type = AT_BIT_AND;
 		    int _channel = TokenChannels.Default;
@@ -164,11 +164,11 @@ namespace Lucene.Net.Expressions.JS
 		    state.channel = _channel;
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BIT_AND"
-		// $ANTLR start "AT_BIT_NOT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BIT_NOT()
+        // do for sure before leaving
+        // $ANTLR end "AT_BIT_AND"
+        // $ANTLR start "AT_BIT_NOT"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BIT_NOT()
 		{
 			try
 			{
@@ -187,11 +187,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BIT_NOT"
-		// $ANTLR start "AT_BIT_OR"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BIT_OR()
+        // do for sure before leaving
+        // $ANTLR end "AT_BIT_NOT"
+        // $ANTLR start "AT_BIT_OR"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BIT_OR()
 		{
 			try
 			{
@@ -210,11 +210,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BIT_OR"
-		// $ANTLR start "AT_BIT_SHL"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BIT_SHL()
+        // do for sure before leaving
+        // $ANTLR end "AT_BIT_OR"
+        // $ANTLR start "AT_BIT_SHL"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BIT_SHL()
 		{
 			try
 			{
@@ -233,11 +233,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BIT_SHL"
-		// $ANTLR start "AT_BIT_SHR"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BIT_SHR()
+        // do for sure before leaving
+        // $ANTLR end "AT_BIT_SHL"
+        // $ANTLR start "AT_BIT_SHR"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BIT_SHR()
 		{
 			try
 			{
@@ -256,11 +256,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BIT_SHR"
-		// $ANTLR start "AT_BIT_SHU"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BIT_SHU()
+        // do for sure before leaving
+        // $ANTLR end "AT_BIT_SHR"
+        // $ANTLR start "AT_BIT_SHU"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BIT_SHU()
 		{
 			try
 			{
@@ -282,7 +282,7 @@ namespace Lucene.Net.Expressions.JS
 		// do for sure before leaving
 		// $ANTLR end "AT_BIT_SHU"
 		// $ANTLR start "AT_BIT_XOR"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MAT_BIT_XOR()
 		{
 			try
@@ -305,7 +305,7 @@ namespace Lucene.Net.Expressions.JS
 		// do for sure before leaving
 		// $ANTLR end "AT_BIT_XOR"
 		// $ANTLR start "AT_BOOL_AND"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MAT_BOOL_AND()
 		{
 			try
@@ -325,11 +325,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BOOL_AND"
-		// $ANTLR start "AT_BOOL_NOT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BOOL_NOT()
+        // do for sure before leaving
+        // $ANTLR end "AT_BOOL_AND"
+        // $ANTLR start "AT_BOOL_NOT"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BOOL_NOT()
 		{
 			try
 			{
@@ -348,11 +348,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BOOL_NOT"
-		// $ANTLR start "AT_BOOL_OR"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_BOOL_OR()
+        // do for sure before leaving
+        // $ANTLR end "AT_BOOL_NOT"
+        // $ANTLR start "AT_BOOL_OR"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_BOOL_OR()
 		{
 			try
 			{
@@ -371,11 +371,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_BOOL_OR"
-		// $ANTLR start "AT_COLON"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COLON()
+        // do for sure before leaving
+        // $ANTLR end "AT_BOOL_OR"
+        // $ANTLR start "AT_COLON"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COLON()
 		{
 			try
 			{
@@ -394,11 +394,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COLON"
-		// $ANTLR start "AT_COMMA"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMMA()
+        // do for sure before leaving
+        // $ANTLR end "AT_COLON"
+        // $ANTLR start "AT_COMMA"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMMA()
 		{
 			try
 			{
@@ -417,11 +417,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMMA"
-		// $ANTLR start "AT_COMP_EQ"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMP_EQ()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMMA"
+        // $ANTLR start "AT_COMP_EQ"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMP_EQ()
 		{
 			try
 			{
@@ -440,11 +440,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMP_EQ"
-		// $ANTLR start "AT_COMP_GT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMP_GT()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMP_EQ"
+        // $ANTLR start "AT_COMP_GT"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMP_GT()
 		{
 			try
 			{
@@ -463,11 +463,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMP_GT"
-		// $ANTLR start "AT_COMP_GTE"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMP_GTE()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMP_GT"
+        // $ANTLR start "AT_COMP_GTE"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMP_GTE()
 		{
 			try
 			{
@@ -486,11 +486,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMP_GTE"
-		// $ANTLR start "AT_COMP_LT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMP_LT()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMP_GTE"
+        // $ANTLR start "AT_COMP_LT"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMP_LT()
 		{
 			try
 			{
@@ -509,11 +509,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMP_LT"
-		// $ANTLR start "AT_COMP_LTE"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMP_LTE()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMP_LT"
+        // $ANTLR start "AT_COMP_LTE"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMP_LTE()
 		{
 			try
 			{
@@ -532,11 +532,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMP_LTE"
-		// $ANTLR start "AT_COMP_NEQ"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COMP_NEQ()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMP_LTE"
+        // $ANTLR start "AT_COMP_NEQ"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COMP_NEQ()
 		{
 			try
 			{
@@ -555,11 +555,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COMP_NEQ"
-		// $ANTLR start "AT_COND_QUE"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_COND_QUE()
+        // do for sure before leaving
+        // $ANTLR end "AT_COMP_NEQ"
+        // $ANTLR start "AT_COND_QUE"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_COND_QUE()
 		{
 			try
 			{
@@ -578,11 +578,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_COND_QUE"
-		// $ANTLR start "AT_DIVIDE"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_DIVIDE()
+        // do for sure before leaving
+        // $ANTLR end "AT_COND_QUE"
+        // $ANTLR start "AT_DIVIDE"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_DIVIDE()
 		{
 			try
 			{
@@ -601,11 +601,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_DIVIDE"
-		// $ANTLR start "AT_DOT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_DOT()
+        // do for sure before leaving
+        // $ANTLR end "AT_DIVIDE"
+        // $ANTLR start "AT_DOT"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_DOT()
 		{
 			try
 			{
@@ -624,11 +624,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_DOT"
-		// $ANTLR start "AT_LPAREN"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_LPAREN()
+        // do for sure before leaving
+        // $ANTLR end "AT_DOT"
+        // $ANTLR start "AT_LPAREN"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_LPAREN()
 		{
 			try
 			{
@@ -650,7 +650,7 @@ namespace Lucene.Net.Expressions.JS
 		// do for sure before leaving
 		// $ANTLR end "AT_LPAREN"
 		// $ANTLR start "AT_MODULO"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MAT_MODULO()
 		{
 			try
@@ -670,11 +670,11 @@ namespace Lucene.Net.Expressions.JS
 			}
 		}
 
-		// do for sure before leaving
-		// $ANTLR end "AT_MODULO"
-		// $ANTLR start "AT_MULTIPLY"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
-		public void MAT_MULTIPLY()
+        // do for sure before leaving
+        // $ANTLR end "AT_MODULO"
+        // $ANTLR start "AT_MULTIPLY"
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
+        public void MAT_MULTIPLY()
 		{
 			try
 			{
@@ -696,7 +696,7 @@ namespace Lucene.Net.Expressions.JS
 		// do for sure before leaving
 		// $ANTLR end "AT_MULTIPLY"
 		// $ANTLR start "AT_RPAREN"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MAT_RPAREN()
 		{
 			try
@@ -719,7 +719,7 @@ namespace Lucene.Net.Expressions.JS
 		// do for sure before leaving
 		// $ANTLR end "AT_RPAREN"
 		// $ANTLR start "AT_SUBTRACT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MAT_SUBTRACT()
 		{
 			try
@@ -742,7 +742,7 @@ namespace Lucene.Net.Expressions.JS
 		// do for sure before leaving
 		// $ANTLR end "AT_SUBTRACT"
 		// $ANTLR start "NAMESPACE_ID"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MNAMESPACE_ID()
 		{
 			try
@@ -792,7 +792,7 @@ loop1_break: ;
 		// do for sure before leaving
 		// $ANTLR end "NAMESPACE_ID"
 		// $ANTLR start "ID"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MID()
 		{
 			try
@@ -858,7 +858,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "ID"
 		// $ANTLR start "WS"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MWS()
 		{
 			try
@@ -924,7 +924,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "WS"
 		// $ANTLR start "DECIMAL"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MDECIMAL()
 		{
 		    try
@@ -1094,7 +1094,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "DECIMAL"
 		// $ANTLR start "OCTAL"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MOCTAL()
 		{
 			try
@@ -1159,7 +1159,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "OCTAL"
 		// $ANTLR start "HEX"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MHEX()
 		{
 			try
@@ -1278,7 +1278,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "HEX"
 		// $ANTLR start "DECIMALINTEGER"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MDECIMALINTEGER()
 		{
 			try
@@ -1362,7 +1362,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "DECIMALINTEGER"
 		// $ANTLR start "EXPONENT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MEXPONENT()
 		{
 			try
@@ -1457,7 +1457,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "EXPONENT"
 		// $ANTLR start "DECIMALDIGIT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MDECIMALDIGIT()
 		{
 			try
@@ -1485,7 +1485,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "DECIMALDIGIT"
 		// $ANTLR start "HEXDIGIT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MHEXDIGIT()
 		{
 			try
@@ -1514,7 +1514,7 @@ loop2_break: ;
 		// do for sure before leaving
 		// $ANTLR end "HEXDIGIT"
 		// $ANTLR start "OCTALDIGIT"
-		/// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+		/// <exception cref="Antlr.Runtime.RecognitionException"></exception>
 		public void MOCTALDIGIT()
 		{
 			try

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9fb8cb15/src/Lucene.Net.Expressions/JS/JavascriptParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Expressions/JS/JavascriptParser.cs b/src/Lucene.Net.Expressions/JS/JavascriptParser.cs
index 0fea96c..ca6d636 100644
--- a/src/Lucene.Net.Expressions/JS/JavascriptParser.cs
+++ b/src/Lucene.Net.Expressions/JS/JavascriptParser.cs
@@ -398,7 +398,7 @@ namespace Lucene.Net.Expressions.JS
 
         // $ANTLR start "conditional"
         // src/java/org/apache/lucene/expressions/js/Javascript.g:254:1: conditional : logical_or ( AT_COND_QUE ^ conditional AT_COLON ! conditional )? ;
-        /// <exception cref="Org.Antlr.Runtime.RecognitionException"></exception>
+        /// <exception cref="Antlr.Runtime.RecognitionException"></exception>
         public AstParserRuleReturnScope<ITree, IToken> Conditional()
         {
             var retval = new AstParserRuleReturnScope<ITree, IToken> { Start = input.LT(1) };


[05/27] lucenenet git commit: Lucene.Net.Grouping: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Grouping: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1b9fe40d
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1b9fe40d
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1b9fe40d

Branch: refs/heads/api-work
Commit: 1b9fe40d777be7f78aac4cad9635c57b2e51dc37
Parents: 638f2a1
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 12:14:03 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:48 2017 +0700

----------------------------------------------------------------------
 .../AbstractAllGroupHeadsCollector.cs                 | 14 +++++++-------
 src/Lucene.Net.Grouping/AbstractAllGroupsCollector.cs |  6 +++---
 .../AbstractDistinctValuesCollector.cs                |  4 ++--
 .../AbstractFirstPassGroupingCollector.cs             |  6 +++---
 .../AbstractGroupFacetCollector.cs                    |  4 ++--
 .../AbstractSecondPassGroupingCollector.cs            |  2 +-
 src/Lucene.Net.Grouping/BlockGroupingCollector.cs     |  2 +-
 .../Function/FunctionFirstPassGroupingCollector.cs    |  2 +-
 .../Function/FunctionSecondPassGroupingCollector.cs   |  2 +-
 src/Lucene.Net.Grouping/GroupingSearch.cs             |  8 ++++----
 .../Term/TermAllGroupHeadsCollector.cs                |  2 +-
 .../Term/TermDistinctValuesCollector.cs               |  2 +-
 .../Term/TermFirstPassGroupingCollector.cs            |  2 +-
 13 files changed, 28 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/AbstractAllGroupHeadsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/AbstractAllGroupHeadsCollector.cs b/src/Lucene.Net.Grouping/AbstractAllGroupHeadsCollector.cs
index 3f6d46b..beba4d5 100644
--- a/src/Lucene.Net.Grouping/AbstractAllGroupHeadsCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractAllGroupHeadsCollector.cs
@@ -99,7 +99,7 @@ namespace Lucene.Net.Search.Grouping
         /// </para>
         /// </summary>
         /// <param name="doc">The document to retrieve the group head for.</param>
-        /// <exception cref="IOException">If I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If I/O related errors occur</exception>
         protected override abstract void RetrieveGroupHeadAndAddIfNotExist(int doc);
 
         /// <summary>
@@ -194,14 +194,14 @@ namespace Lucene.Net.Search.Grouping
         /// -1 if the specified document wasn't competitive against the current most relevant document, 1 if the
         /// specified document was competitive against the current most relevant document. Otherwise 0.
         /// </returns>
-        /// <exception cref="IOException">If I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If I/O related errors occur</exception>
         public abstract int Compare(int compIDX, int doc);
 
         /// <summary>
         /// Updates the current most relevant document with the specified document.
         /// </summary>
         /// <param name="doc">The specified document</param>
-        /// <exception cref="IOException">If I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If I/O related errors occur</exception>
         public abstract void UpdateDocHead(int doc);
     }
 
@@ -233,15 +233,15 @@ namespace Lucene.Net.Search.Grouping
         public abstract int GroupHeadsCount { get; }
 
         /// <summary>
-        /// Returns the group head and puts it into <see cref="TemporalResult"/>.
+        /// Returns the group head and puts it into <see cref="AbstractAllGroupHeadsCollector{GH}.TemporalResult"/>.
         /// If the group head wasn't encountered before then it will be added to the collected group heads.
         /// <para>
-        /// The <see cref="TemporalResult.stop"/> property will be <c>true</c> if the group head wasn't encountered before
+        /// The <see cref="AbstractAllGroupHeadsCollector{GH}.TemporalResult.Stop"/> property will be <c>true</c> if the group head wasn't encountered before
         /// otherwise <c>false</c>.
         /// </para>
         /// </summary>
         /// <param name="doc">The document to retrieve the group head for.</param>
-        /// <exception cref="IOException">If I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If I/O related errors occur</exception>
         protected abstract void RetrieveGroupHeadAndAddIfNotExist(int doc);
 
 
@@ -274,7 +274,7 @@ namespace Lucene.Net.Search.Grouping
         /// Called before collecting from each <see cref="AtomicReaderContext"/>. All doc ids in
         /// <see cref="Collect(int)"/> will correspond to <see cref="Index.IndexReaderContext.Reader"/>.
         ///
-        /// Add <see cref="AtomicReaderContext#docBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
+        /// Add <see cref="AtomicReaderContext.DocBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
         /// internal document id to re-base ids in <see cref="Collect(int)"/>.
         /// </summary>
         /// <param name="context">next atomic reader context </param>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/AbstractAllGroupsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/AbstractAllGroupsCollector.cs b/src/Lucene.Net.Grouping/AbstractAllGroupsCollector.cs
index af10a0d..27e8104 100644
--- a/src/Lucene.Net.Grouping/AbstractAllGroupsCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractAllGroupsCollector.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Search.Grouping
         /// <summary>
         /// Returns the group values
         /// <para>
-        /// This is an unordered collections of group values. For each group that matched the query there is a <see cref="BytesRef"/>
+        /// This is an unordered collections of group values. For each group that matched the query there is a <see cref="Util.BytesRef"/>
         /// representing a group value.
         /// </para>
         /// </summary>
@@ -87,7 +87,7 @@ namespace Lucene.Net.Search.Grouping
         /// Called before collecting from each <see cref="AtomicReaderContext"/>. All doc ids in
         /// <see cref="Collect(int)"/> will correspond to <see cref="Index.IndexReaderContext.Reader"/>.
         ///
-        /// Add <see cref="AtomicReaderContext#docBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
+        /// Add <see cref="AtomicReaderContext.DocBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
         /// internal document id to re-base ids in <see cref="Collect(int)"/>.
         /// </summary>
         /// <param name="context">next atomic reader context </param>
@@ -115,7 +115,7 @@ namespace Lucene.Net.Search.Grouping
         /// <summary>
         /// Returns the group values
         /// <para>
-        /// This is an unordered collections of group values. For each group that matched the query there is a <see cref="BytesRef"/>
+        /// This is an unordered collections of group values. For each group that matched the query there is a <see cref="Util.BytesRef"/>
         /// representing a group value.
         /// </para>
         /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
index 2ad7595..8f586ed 100644
--- a/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Search.Grouping
         /// Called before collecting from each <see cref="AtomicReaderContext"/>. All doc ids in
         /// <see cref="Collect(int)"/> will correspond to <see cref="Index.IndexReaderContext.Reader"/>.
         ///
-        /// Add <see cref="AtomicReaderContext#docBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
+        /// Add <see cref="AtomicReaderContext.DocBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
         /// internal document id to re-base ids in <see cref="Collect(int)"/>.
         /// </summary>
         /// <param name="context">next atomic reader context </param>
@@ -85,7 +85,7 @@ namespace Lucene.Net.Search.Grouping
         private AbstractDistinctValuesCollector() { }
 
         /// <summary>
-        /// Returned by <see cref="AbstractDistinctValuesCollector.GetGroups()"/>,
+        /// Returned by <see cref="AbstractDistinctValuesCollector{GC}.Groups"/>,
         /// representing the value and set of distinct values for the group.
         /// </summary>
         /// <typeparam name="TGroupValue"></typeparam>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
index f4bc5fd..126596b 100644
--- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
@@ -64,7 +64,7 @@ namespace Lucene.Net.Search.Grouping
         /// Sort.RELEVANCE.
         /// </param>
         /// <param name="topNGroups">How many top groups to keep.</param>
-        /// <exception cref="IOException">If I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If I/O related errors occur</exception>
         public AbstractFirstPassGroupingCollector(Sort groupSort, int topNGroups)
         {
             if (topNGroups < 1)
@@ -101,7 +101,7 @@ namespace Lucene.Net.Search.Grouping
         /// number of unique groups collected is &lt;= offset.
         /// </summary>
         /// <param name="groupOffset">The offset in the collected groups</param>
-        /// <param name="fillFields">Whether to fill to <see cref="SearchGroup.sortValues"/></param>
+        /// <param name="fillFields">Whether to fill to <see cref="SearchGroup{TGroupValue}.SortValues"/></param>
         /// <returns>top groups, starting from offset</returns>
         public virtual IEnumerable<ISearchGroup<TGroupValue>> GetTopGroups(int groupOffset, bool fillFields)
         {
@@ -427,7 +427,7 @@ namespace Lucene.Net.Search.Grouping
         /// number of unique groups collected is &lt;= offset.
         /// </summary>
         /// <param name="groupOffset">The offset in the collected groups</param>
-        /// <param name="fillFields">Whether to fill to <see cref="SearchGroup.sortValues"/></param>
+        /// <param name="fillFields">Whether to fill to <see cref="SearchGroup{TGroupValue}.SortValues"/></param>
         /// <returns>top groups, starting from offset</returns>
         /// <remarks>
         /// LUCENENET NOTE: We must use <see cref="IEnumerable{TGroupValue}"/> rather than 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs b/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
index 4ccca43..ad2c0c1 100644
--- a/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractGroupFacetCollector.cs
@@ -57,7 +57,7 @@ namespace Lucene.Net.Search.Grouping
         /// are sorted lexicographically in ascending order.
         /// </param>
         /// <returns>grouped facet results</returns>
-        /// <exception cref="IOException">If I/O related errors occur during merging segment grouped facet counts.</exception>
+        /// <exception cref="System.IO.IOException">If I/O related errors occur during merging segment grouped facet counts.</exception>
         public virtual GroupedFacetResult MergeSegmentResults(int size, int minCount, bool orderByCount)
         {
             if (m_segmentFacetCounts != null)
@@ -344,7 +344,7 @@ namespace Lucene.Net.Search.Grouping
             /// <summary>
             /// Go to next term in this <see cref="AbstractSegmentResult"/> in order to retrieve the grouped facet counts.
             /// </summary>
-            /// <exception cref="IOException">If I/O related errors occur</exception>
+            /// <exception cref="System.IO.IOException">If I/O related errors occur</exception>
             protected internal abstract void NextTerm();
 
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
index ee75c57..396efc6 100644
--- a/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs
@@ -106,7 +106,7 @@ namespace Lucene.Net.Search.Grouping
         /// </summary>
         /// <param name="doc">The specified doc</param>
         /// <returns>the group the specified doc belongs to or <c>null</c> if no group could be retrieved</returns>
-        /// <exception cref="IOException">If an I/O related error occurred</exception>
+        /// <exception cref="System.IO.IOException">If an I/O related error occurred</exception>
         protected abstract AbstractSecondPassGroupingCollector.SearchGroupDocs<TGroupValue> RetrieveGroup(int doc);
 
         public virtual void SetNextReader(AtomicReaderContext context)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/BlockGroupingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs
index 165f386..6f38ca3 100644
--- a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs
@@ -369,7 +369,7 @@ namespace Lucene.Net.Search.Grouping
         /// </para>
         /// </summary>
         /// <typeparam name="TGroupValue">The expected return type for group value</typeparam>
-        /// <<param name="withinGroupSort">
+        /// <param name="withinGroupSort">
         /// The <see cref="Sort"/> used to sort
         /// documents within each group.  Passing null is
         /// allowed, to sort by relevance.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/Function/FunctionFirstPassGroupingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/Function/FunctionFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionFirstPassGroupingCollector.cs
index 224db45..f389049 100644
--- a/src/Lucene.Net.Grouping/Function/FunctionFirstPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/Function/FunctionFirstPassGroupingCollector.cs
@@ -50,7 +50,7 @@ namespace Lucene.Net.Search.Grouping.Function
         /// <see cref="Sort.RELEVANCE"/>.
         /// </param>
         /// <param name="topNGroups">How many top groups to keep.</param>
-        /// <exception cref="IOException">When I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">When I/O related errors occur</exception>
         public FunctionFirstPassGroupingCollector(ValueSource groupByVS, IDictionary /* Map<?, ?> */ vsContext, Sort groupSort, int topNGroups)
             : base(groupSort, topNGroups)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs
index 75222e6..6bd648e 100644
--- a/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Search.Grouping.Function
         /// <param name="fillSortFields">Whether to fill the sort values in <see cref="TopGroups{TGroupValueType}.WithinGroupSort"/></param>
         /// <param name="groupByVS">The <see cref="ValueSource"/> to group by</param>
         /// <param name="vsContext">The value source context</param>
-        /// <exception cref="IOException">When I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">When I/O related errors occur</exception>
         public FunctionSecondPassGroupingCollector(IEnumerable<ISearchGroup<MutableValue>> searchGroups, 
             Sort groupSort, Sort withinGroupSort, int maxDocsPerGroup, bool getScores, bool getMaxScores, 
             bool fillSortFields, ValueSource groupByVS, IDictionary /* Map<?, ?> */ vsContext)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/GroupingSearch.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/GroupingSearch.cs b/src/Lucene.Net.Grouping/GroupingSearch.cs
index 0ac877c..c3bcee9 100644
--- a/src/Lucene.Net.Grouping/GroupingSearch.cs
+++ b/src/Lucene.Net.Grouping/GroupingSearch.cs
@@ -104,7 +104,7 @@ namespace Lucene.Net.Search.Grouping
         /// <param name="groupOffset">The group offset</param>
         /// <param name="groupLimit">The number of groups to return from the specified group offset</param>
         /// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
-        /// <exception cref="IOException">If any I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
         public virtual ITopGroups<object> Search(IndexSearcher searcher, Query query, int groupOffset, int groupLimit)
         {
             return Search<object>(searcher, null, query, groupOffset, groupLimit);
@@ -120,7 +120,7 @@ namespace Lucene.Net.Search.Grouping
         /// <param name="groupOffset">The group offset</param>
         /// <param name="groupLimit">The number of groups to return from the specified group offset</param>
         /// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
-        /// <exception cref="IOException">If any I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
         public virtual ITopGroups<TGroupValue> Search<TGroupValue>(IndexSearcher searcher, Query query, int groupOffset, int groupLimit)
         {
             return Search<TGroupValue>(searcher, null, query, groupOffset, groupLimit);
@@ -135,7 +135,7 @@ namespace Lucene.Net.Search.Grouping
         /// <param name="groupOffset">The group offset</param>
         /// <param name="groupLimit">The number of groups to return from the specified group offset</param>
         /// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
-        /// <exception cref="IOException">If any I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
         public virtual ITopGroups<object> Search(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit)
         {
             if (groupFunction != null)
@@ -166,7 +166,7 @@ namespace Lucene.Net.Search.Grouping
         /// <param name="groupOffset">The group offset</param>
         /// <param name="groupLimit">The number of groups to return from the specified group offset</param>
         /// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
-        /// <exception cref="IOException">If any I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
         public virtual ITopGroups<TGroupValue> Search<TGroupValue>(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit)
         {
             if (groupField != null || groupFunction != null)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
index 8961fed..e3c68dc 100644
--- a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs
@@ -62,7 +62,7 @@ namespace Lucene.Net.Search.Grouping.Terms
         private TermAllGroupHeadsCollector() { }
 
         /// <summary>
-        /// Creates an <see cref=AbstractAllGroupHeadsCollector""/> instance based on the supplied arguments.
+        /// Creates an <see cref="AbstractAllGroupHeadsCollector"/> instance based on the supplied arguments.
         /// This factory method decides with implementation is best suited.
         /// <para>
         /// Delegates to <see cref="Create(string, Sort, int)"/> with an initialSize of 128.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
index 704ab70..fd186cb 100644
--- a/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermDistinctValuesCollector.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Search.Grouping.Terms
 	 */
 
     /// <summary>
-    /// A term based implementation of <see cref="AbstractDistinctValuesCollector{TermDistinctValuesCollector.GroupCount}"/> that relies
+    /// A term based implementation of <see cref="T:AbstractDistinctValuesCollector{TermDistinctValuesCollector.GroupCount}"/> that relies
     /// on <see cref="SortedDocValues"/> to count the distinct values per group.
     /// 
     /// @lucene.experimental

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1b9fe40d/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs
index afefb78..f2e31a0 100644
--- a/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Search.Grouping.Terms
         /// <param name="topNGroups">
         /// How many top groups to keep.
         /// </param>
-        /// <exception cref="IOException">When I/O related errors occur</exception>
+        /// <exception cref="System.IO.IOException">When I/O related errors occur</exception>
         public TermFirstPassGroupingCollector(string groupField, Sort groupSort, int topNGroups)
             : base(groupSort, topNGroups)
         {


[08/27] lucenenet git commit: Lucene.Net.Join: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Join: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bbac4318
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bbac4318
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bbac4318

Branch: refs/heads/api-work
Commit: bbac43182e6b3338a5be59f6096df725d6c10378
Parents: 2721759
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 12:32:37 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:50 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Join/TermsCollector.cs             | 2 +-
 src/Lucene.Net.Join/TermsWithScoreCollector.cs    | 3 ++-
 src/Lucene.Net.Join/ToParentBlockJoinCollector.cs | 2 +-
 src/Lucene.Net.Join/ToParentBlockJoinQuery.cs     | 3 +--
 4 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bbac4318/src/Lucene.Net.Join/TermsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Join/TermsCollector.cs b/src/Lucene.Net.Join/TermsCollector.cs
index 8549465..449b128 100644
--- a/src/Lucene.Net.Join/TermsCollector.cs
+++ b/src/Lucene.Net.Join/TermsCollector.cs
@@ -70,7 +70,7 @@ namespace Lucene.Net.Join
         /// Called before collecting from each <see cref="AtomicReaderContext"/>. All doc ids in
         /// <see cref="Collect(int)"/> will correspond to <see cref="Index.IndexReaderContext.Reader"/>.
         ///
-        /// Add <see cref="AtomicReaderContext#docBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
+        /// Add <see cref="AtomicReaderContext.DocBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
         /// internal document id to re-base ids in <see cref="Collect(int)"/>.
         /// </summary>
         /// <param name="context">next atomic reader context </param>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bbac4318/src/Lucene.Net.Join/TermsWithScoreCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Join/TermsWithScoreCollector.cs b/src/Lucene.Net.Join/TermsWithScoreCollector.cs
index be2c943..d2581f9 100644
--- a/src/Lucene.Net.Join/TermsWithScoreCollector.cs
+++ b/src/Lucene.Net.Join/TermsWithScoreCollector.cs
@@ -85,7 +85,7 @@ namespace Lucene.Net.Join
         /// Called before collecting from each <see cref="AtomicReaderContext"/>. All doc ids in
         /// <see cref="Collect(int)"/> will correspond to <see cref="Index.IndexReaderContext.Reader"/>.
         ///
-        /// Add <see cref="AtomicReaderContext#docBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
+        /// Add <see cref="AtomicReaderContext.DocBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
         /// internal document id to re-base ids in <see cref="Collect(int)"/>.
         /// </summary>
         /// <param name="context">next atomic reader context </param>
@@ -102,6 +102,7 @@ namespace Lucene.Net.Join
         /// </summary>
         /// <param name="field">The field to collect terms for.</param>
         /// <param name="multipleValuesPerDocument">Whether the field to collect terms for has multiple values per document.</param>
+        /// <param name="scoreMode">See <see cref="ScoreMode"/></param>
         /// <returns>A <see cref="TermsWithScoreCollector"/> instance</returns>
         internal static TermsWithScoreCollector Create(string field, bool multipleValuesPerDocument, ScoreMode scoreMode)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bbac4318/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
index 77a38cf..46847de 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
@@ -68,7 +68,7 @@ namespace Lucene.Net.Join
     /// join.  The <see cref="TopGroups{T}"/> of the nested joins will not be
     /// correct.</para>
     /// 
-    /// See <see cref="org.apache.lucene.search.join"/> for a code
+    /// See <a href="http://lucene.apache.org/core/4_8_0/join/">http://lucene.apache.org/core/4_8_0/join/</a> for a code
     /// sample.
     /// 
     /// @lucene.experimental

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bbac4318/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
index 44485d9..3dea9b6 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
@@ -61,14 +61,13 @@ namespace Lucene.Net.Join
     /// from <see cref="ToParentBlockJoinCollector"/> will not contain every
     /// child for parents that had matched.</para>
     /// 
-    /// <para>See <see cref="org.apache.lucene.search.join"/> for an
+    /// <para>See <a href="http://lucene.apache.org/core/4_8_0/join/">http://lucene.apache.org/core/4_8_0/join/</a> for an
     /// overview. </para>
     /// 
     /// @lucene.experimental
     /// </summary>
     public class ToParentBlockJoinQuery : Query
     {
-
         private readonly Filter _parentsFilter;
         private readonly Query _childQuery;
 


[16/27] lucenenet git commit: Lucene.Net (Index.DocsAndPositionsEnum + Codecs.Lucene3xFields) refactor: Payload > GetPayload() (makes a conversion)

Posted by ni...@apache.org.
Lucene.Net (Index.DocsAndPositionsEnum + Codecs.Lucene3xFields) refactor: Payload > GetPayload() (makes a conversion)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1ace7809
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1ace7809
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1ace7809

Branch: refs/heads/api-work
Commit: 1ace780993cfa2cff19dde5319bea86636ca7a62
Parents: bcc0d17
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 14:35:48 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:56 2017 +0700

----------------------------------------------------------------------
 .../Memory/DirectPostingsFormat.cs              | 52 +++++++++----------
 .../Memory/MemoryPostingsFormat.cs              |  7 +--
 .../Pulsing/PulsingPostingsReader.cs            | 37 +++++++-------
 src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs  | 53 +++++++++-----------
 .../SimpleText/SimpleTextFieldsReader.cs        |  4 +-
 .../SimpleText/SimpleTextTermVectorsReader.cs   |  4 +-
 .../Compressing/CompressingTermVectorsReader.cs | 19 +++----
 .../Codecs/Lucene3x/Lucene3xFields.cs           |  7 +--
 .../Lucene3x/Lucene3xTermVectorsReader.cs       |  7 +--
 .../Codecs/Lucene3x/SegmentTermPositions.cs     | 37 +++++++-------
 .../Codecs/Lucene40/Lucene40PostingsReader.cs   | 50 ++++++++----------
 .../Lucene40/Lucene40TermVectorsReader.cs       | 29 +++++------
 .../Codecs/Lucene41/Lucene41PostingsReader.cs   | 30 +++++------
 .../Codecs/MappingMultiDocsAndPositionsEnum.cs  |  7 +--
 src/Lucene.Net.Core/Codecs/PostingsConsumer.cs  |  4 +-
 src/Lucene.Net.Core/Codecs/TermVectorsWriter.cs |  2 +-
 src/Lucene.Net.Core/Index/CheckIndex.cs         | 12 ++---
 .../Index/DocsAndPositionsEnum.cs               |  2 +-
 src/Lucene.Net.Core/Index/FilterAtomicReader.cs |  7 +--
 .../Index/MultiDocsAndPositionsEnum.cs          |  7 +--
 src/Lucene.Net.Core/Search/MultiPhraseQuery.cs  |  7 +--
 .../Search/Payloads/PayloadTermQuery.cs         |  2 +-
 src/Lucene.Net.Core/Search/Spans/TermSpans.cs   |  4 +-
 .../Highlight/TokenSources.cs                   |  2 +-
 .../TokenStreamFromTermPositionVector.cs        |  2 +-
 .../PostingsHighlight/MultiTermHighlighting.cs  | 11 ++--
 .../PostingsHighlight/PostingsHighlighter.cs    |  6 +--
 .../MemoryIndex.MemoryIndexReader.cs            |  7 +--
 .../Index/Sorter/SortingAtomicReader.cs         |  9 ++--
 .../Codecs/ramonly/RAMOnlyPostingsFormat.cs     | 17 +++----
 .../Index/AssertingAtomicReader.cs              | 19 +++----
 .../Index/BasePostingsFormatTestCase.cs         | 17 +++----
 .../Index/BaseTermVectorsFormatTestCase.cs      |  2 +-
 .../Util/LuceneTestCase.cs                      |  4 +-
 .../Index/Memory/MemoryIndexTest.cs             |  2 +-
 .../Index/Sorter/SorterTestBase.cs              |  4 +-
 src/Lucene.Net.Tests/core/Index/TestCodecs.cs   |  6 +--
 .../core/Index/TestDocumentWriter.cs            |  6 +--
 .../core/Index/TestLongPostings.cs              |  8 +--
 .../core/Index/TestMultiLevelSkipList.cs        |  2 +-
 src/Lucene.Net.Tests/core/Index/TestPayloads.cs | 22 ++++----
 .../core/Index/TestPayloadsOnVectors.cs         |  4 +-
 .../core/Index/TestPostingsOffsets.cs           |  8 +--
 43 files changed, 238 insertions(+), 310 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
index 43c1ba3..063d2f2 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
@@ -478,7 +478,7 @@ namespace Lucene.Net.Codecs.Memory
                                         }
                                         if (hasPayloads_Renamed)
                                         {
-                                            BytesRef payload = docsAndPositionsEnum.Payload;
+                                            BytesRef payload = docsAndPositionsEnum.GetPayload();
                                             if (payload != null)
                                             {
                                                 scratch.Add(payload.Length);
@@ -578,7 +578,7 @@ namespace Lucene.Net.Codecs.Memory
                                         positions[upto][posUpto] = docsAndPositionsEnum.NextPosition();
                                         if (hasPayloads_Renamed)
                                         {
-                                            BytesRef payload = docsAndPositionsEnum.Payload;
+                                            BytesRef payload = docsAndPositionsEnum.GetPayload();
                                             if (payload != null)
                                             {
                                                 var payloadBytes = new byte[payload.Length];
@@ -2288,21 +2288,18 @@ namespace Lucene.Net.Codecs.Memory
                 return SlowAdvance(target);
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                if (payloadLength > 0)
                 {
-                    if (payloadLength > 0)
-                    {
-                        payload.Bytes = payloadBytes;
-                        payload.Offset = lastPayloadOffset;
-                        payload.Length = payloadLength;
-                        return payload;
-                    }
-                    else
-                    {
-                        return null;
-                    }
+                    payload.Bytes = payloadBytes;
+                    payload.Offset = lastPayloadOffset;
+                    payload.Length = payloadLength;
+                    return payload;
+                }
+                else
+                {
+                    return null;
                 }
             }
 
@@ -2729,23 +2726,20 @@ namespace Lucene.Net.Codecs.Memory
 
             private readonly BytesRef _payload = new BytesRef();
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    if (payloads == null)
-                        return null;
+                if (payloads == null)
+                    return null;
                 
-                    var payloadBytes = payloads[upto][posUpto/(hasOffsets ? 3 : 1)];
-                    if (payloadBytes == null)
-                    {
-                        return null;
-                    }
-                    _payload.Bytes = payloadBytes;
-                    _payload.Length = payloadBytes.Length;
-                    _payload.Offset = 0;
-                    return _payload;
+                var payloadBytes = payloads[upto][posUpto/(hasOffsets ? 3 : 1)];
+                if (payloadBytes == null)
+                {
+                    return null;
                 }
+                _payload.Bytes = payloadBytes;
+                _payload.Length = payloadBytes.Length;
+                _payload.Offset = 0;
+                return _payload;
             }
 
             public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
index 1052816..3fda936 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
@@ -713,12 +713,9 @@ namespace Lucene.Net.Codecs.Memory
                 get { return startOffset_Renamed + offsetLength; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return payload.Length > 0 ? payload : null;
-                }
+                return payload.Length > 0 ? payload : null;
             }
 
             public override int DocID

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
index 38f477b..3592de9 100644
--- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
+++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
@@ -608,31 +608,28 @@ namespace Lucene.Net.Codecs.Pulsing
                 }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    if (_payloadRetrieved)
-                        return _payload;
+                if (_payloadRetrieved)
+                    return _payload;
 
-                    if (_storePayloads && _payloadLength > 0)
+                if (_storePayloads && _payloadLength > 0)
+                {
+                    _payloadRetrieved = true;
+                    if (_payload == null)
                     {
-                        _payloadRetrieved = true;
-                        if (_payload == null)
-                        {
-                            _payload = new BytesRef(_payloadLength);
-                        }
-                        else
-                        {
-                            _payload.Grow(_payloadLength);
-                        }
-                        _postings.ReadBytes(_payload.Bytes, 0, _payloadLength);
-                        _payload.Length = _payloadLength;
-                        return _payload;
+                        _payload = new BytesRef(_payloadLength);
                     }
-
-                    return null;
+                    else
+                    {
+                        _payload.Grow(_payloadLength);
+                    }
+                    _postings.ReadBytes(_payload.Bytes, 0, _payloadLength);
+                    _payload.Length = _payloadLength;
+                    return _payload;
                 }
+
+                return null;
             }
 
             public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
index 02abf66..9226c2e 100644
--- a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
+++ b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
@@ -714,41 +714,38 @@ namespace Lucene.Net.Codecs.Sep
 
             private BytesRef _payload;
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                if (!_payloadPending)
                 {
-                    if (!_payloadPending)
-                    {
-                        return null;
-                    }
-
-                    if (_pendingPayloadBytes == 0)
-                    {
-                        return _payload;
-                    }
+                    return null;
+                }
 
-                    Debug.Assert(_pendingPayloadBytes >= _payloadLength);
+                if (_pendingPayloadBytes == 0)
+                {
+                    return _payload;
+                }
 
-                    if (_pendingPayloadBytes > _payloadLength)
-                    {
-                        _payloadIn.Seek(_payloadIn.FilePointer + (_pendingPayloadBytes - _payloadLength));
-                    }
+                Debug.Assert(_pendingPayloadBytes >= _payloadLength);
 
-                    if (_payload == null)
-                    {
-                        _payload = new BytesRef {Bytes = new byte[_payloadLength]};
-                    }
-                    else if (_payload.Bytes.Length < _payloadLength)
-                    {
-                        _payload.Grow(_payloadLength);
-                    }
+                if (_pendingPayloadBytes > _payloadLength)
+                {
+                    _payloadIn.Seek(_payloadIn.FilePointer + (_pendingPayloadBytes - _payloadLength));
+                }
 
-                    _payloadIn.ReadBytes(_payload.Bytes, 0, _payloadLength);
-                    _payload.Length = _payloadLength;
-                    _pendingPayloadBytes = 0;
-                    return _payload;
+                if (_payload == null)
+                {
+                    _payload = new BytesRef {Bytes = new byte[_payloadLength]};
+                }
+                else if (_payload.Bytes.Length < _payloadLength)
+                {
+                    _payload.Grow(_payloadLength);
                 }
+
+                _payloadIn.ReadBytes(_payload.Bytes, 0, _payloadLength);
+                _payload.Length = _payloadLength;
+                _pendingPayloadBytes = 0;
+                return _payload;
             }
 
             public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
index 83047a4..5c2398c 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
@@ -547,9 +547,9 @@ namespace Lucene.Net.Codecs.SimpleText
                 get { return _endOffset; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get { return _payload; }
+                return _payload;
             }
 
             public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
index a6d5f5b..1fd43bd 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
@@ -575,9 +575,9 @@ namespace Lucene.Net.Codecs.SimpleText
                 _nextPos = 0;
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get { return _payloads == null ? null : _payloads[_nextPos - 1]; }
+                return _payloads == null ? null : _payloads[_nextPos - 1];
             }
 
             public override int NextPosition()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
index 2257ecb..1abbb55 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
@@ -1149,19 +1149,16 @@ namespace Lucene.Net.Codecs.Compressing
                 }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                CheckPosition();
+                if (payloadIndex == null || payload.Length == 0)
                 {
-                    CheckPosition();
-                    if (payloadIndex == null || payload.Length == 0)
-                    {
-                        return null;
-                    }
-                    else
-                    {
-                        return payload;
-                    }
+                    return null;
+                }
+                else
+                {
+                    return payload;
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFields.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFields.cs b/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFields.cs
index 520d2cf..57aab75 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFields.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFields.cs
@@ -1268,12 +1268,9 @@ namespace Lucene.Net.Codecs.Lucene3x
                 get { return -1; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return pos.Payload;
-                }
+                return pos.GetPayload();
             }
 
             public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
index 5161e5e..75331bb 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
@@ -808,12 +808,9 @@ namespace Lucene.Net.Codecs.Lucene3x
                 nextPos = 0;
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return null;
-                }
+                return null;
             }
 
             public override int NextPosition()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Lucene3x/SegmentTermPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene3x/SegmentTermPositions.cs b/src/Lucene.Net.Core/Codecs/Lucene3x/SegmentTermPositions.cs
index bd68abc..95893dc 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene3x/SegmentTermPositions.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene3x/SegmentTermPositions.cs
@@ -228,33 +228,30 @@ namespace Lucene.Net.Codecs.Lucene3x
             }
         }
 
-        public BytesRef Payload
+        public BytesRef GetPayload()
         {
-            get
+            if (payloadLength <= 0)
+            {
+                return null; // no payload
+            }
+
+            if (needToLoadPayload)
             {
-                if (payloadLength <= 0)
+                // read payloads lazily
+                if (payload == null)
                 {
-                    return null; // no payload
+                    payload = new BytesRef(payloadLength);
                 }
-
-                if (needToLoadPayload)
+                else
                 {
-                    // read payloads lazily
-                    if (payload == null)
-                    {
-                        payload = new BytesRef(payloadLength);
-                    }
-                    else
-                    {
-                        payload.Grow(payloadLength);
-                    }
-
-                    proxStream.ReadBytes(payload.Bytes, payload.Offset, payloadLength);
-                    payload.Length = payloadLength;
-                    needToLoadPayload = false;
+                    payload.Grow(payloadLength);
                 }
-                return payload;
+
+                proxStream.ReadBytes(payload.Bytes, payload.Offset, payloadLength);
+                payload.Length = payloadLength;
+                needToLoadPayload = false;
             }
+            return payload;
         }
 
         public bool IsPayloadAvailable

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
index 7244618..abb8876 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
@@ -960,12 +960,9 @@ namespace Lucene.Net.Codecs.Lucene40
             /// Returns the payload at this position, or null if no
             ///  payload was indexed.
             /// </summary>
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return null;
-                }
+                return null;
             }
 
             public override long GetCost()
@@ -1268,37 +1265,34 @@ namespace Lucene.Net.Codecs.Lucene40
             /// Returns the payload at this position, or null if no
             ///  payload was indexed.
             /// </summary>
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                if (storePayloads)
                 {
-                    if (storePayloads)
+                    if (payloadLength <= 0)
                     {
-                        if (payloadLength <= 0)
-                        {
-                            return null;
-                        }
-                        Debug.Assert(lazyProxPointer == -1);
-                        Debug.Assert(posPendingCount < freq);
+                        return null;
+                    }
+                    Debug.Assert(lazyProxPointer == -1);
+                    Debug.Assert(posPendingCount < freq);
 
-                        if (payloadPending)
+                    if (payloadPending)
+                    {
+                        if (payloadLength > payload.Bytes.Length)
                         {
-                            if (payloadLength > payload.Bytes.Length)
-                            {
-                                payload.Grow(payloadLength);
-                            }
-
-                            proxIn.ReadBytes(payload.Bytes, 0, payloadLength);
-                            payload.Length = payloadLength;
-                            payloadPending = false;
+                            payload.Grow(payloadLength);
                         }
 
-                        return payload;
-                    }
-                    else
-                    {
-                        return null;
+                        proxIn.ReadBytes(payload.Bytes, 0, payloadLength);
+                        payload.Length = payloadLength;
+                        payloadPending = false;
                     }
+
+                    return payload;
+                }
+                else
+                {
+                    return null;
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
index 2e08e14..d75ffef 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
@@ -783,27 +783,24 @@ namespace Lucene.Net.Codecs.Lucene40
                 nextPos = 0;
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                if (payloadOffsets == null)
+                {
+                    return null;
+                }
+                else
                 {
-                    if (payloadOffsets == null)
+                    int off = payloadOffsets[nextPos - 1];
+                    int end = nextPos == payloadOffsets.Length ? payloadBytes.Length : payloadOffsets[nextPos];
+                    if (end - off == 0)
                     {
                         return null;
                     }
-                    else
-                    {
-                        int off = payloadOffsets[nextPos - 1];
-                        int end = nextPos == payloadOffsets.Length ? payloadBytes.Length : payloadOffsets[nextPos];
-                        if (end - off == 0)
-                        {
-                            return null;
-                        }
-                        payload.Bytes = payloadBytes;
-                        payload.Offset = off;
-                        payload.Length = end - off;
-                        return payload;
-                    }
+                    payload.Bytes = payloadBytes;
+                    payload.Offset = off;
+                    payload.Length = end - off;
+                    return payload;
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/Lucene41/Lucene41PostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene41/Lucene41PostingsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene41/Lucene41PostingsReader.cs
index f848ff5..9bb4f4a 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene41/Lucene41PostingsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene41/Lucene41PostingsReader.cs
@@ -1046,12 +1046,9 @@ namespace Lucene.Net.Codecs.Lucene41
                 get { return -1; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return null;
-                }
+                return null;
             }
 
             public override long GetCost()
@@ -1697,21 +1694,18 @@ namespace Lucene.Net.Codecs.Lucene41
                 get { return endOffset; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                // if (DEBUG) {
+                //   System.out.println("    FPR.getPayload payloadLength=" + payloadLength + " payloadByteUpto=" + payloadByteUpto);
+                // }
+                if (payloadLength == 0)
                 {
-                    // if (DEBUG) {
-                    //   System.out.println("    FPR.getPayload payloadLength=" + payloadLength + " payloadByteUpto=" + payloadByteUpto);
-                    // }
-                    if (payloadLength == 0)
-                    {
-                        return null;
-                    }
-                    else
-                    {
-                        return payload;
-                    }
+                    return null;
+                }
+                else
+                {
+                    return payload;
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/MappingMultiDocsAndPositionsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/MappingMultiDocsAndPositionsEnum.cs b/src/Lucene.Net.Core/Codecs/MappingMultiDocsAndPositionsEnum.cs
index 26f8edd..15daee5 100644
--- a/src/Lucene.Net.Core/Codecs/MappingMultiDocsAndPositionsEnum.cs
+++ b/src/Lucene.Net.Core/Codecs/MappingMultiDocsAndPositionsEnum.cs
@@ -161,12 +161,9 @@ namespace Lucene.Net.Codecs
             get { return current.EndOffset; }
         }
 
-        public override BytesRef Payload
+        public override BytesRef GetPayload()
         {
-            get
-            {
-                return current.Payload;
-            }
+            return current.GetPayload();
         }
 
         public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/PostingsConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/PostingsConsumer.cs b/src/Lucene.Net.Core/Codecs/PostingsConsumer.cs
index 4f52d83..3fc91e1 100644
--- a/src/Lucene.Net.Core/Codecs/PostingsConsumer.cs
+++ b/src/Lucene.Net.Core/Codecs/PostingsConsumer.cs
@@ -139,7 +139,7 @@ namespace Lucene.Net.Codecs
                     for (int i = 0; i < freq; i++)
                     {
                         int position = postingsEnum.NextPosition();
-                        BytesRef payload = postingsEnum.Payload;
+                        BytesRef payload = postingsEnum.GetPayload();
                         this.AddPosition(position, payload, -1, -1);
                     }
                     this.FinishDoc();
@@ -164,7 +164,7 @@ namespace Lucene.Net.Codecs
                     for (int i = 0; i < freq; i++)
                     {
                         int position = postingsEnum.NextPosition();
-                        BytesRef payload = postingsEnum.Payload;
+                        BytesRef payload = postingsEnum.GetPayload();
                         this.AddPosition(position, payload, postingsEnum.StartOffset, postingsEnum.EndOffset);
                     }
                     this.FinishDoc();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Codecs/TermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/TermVectorsWriter.cs b/src/Lucene.Net.Core/Codecs/TermVectorsWriter.cs
index ea1c1df..e8cade2 100644
--- a/src/Lucene.Net.Core/Codecs/TermVectorsWriter.cs
+++ b/src/Lucene.Net.Core/Codecs/TermVectorsWriter.cs
@@ -333,7 +333,7 @@ namespace Lucene.Net.Codecs
                             int startOffset = docsAndPositionsEnum.StartOffset;
                             int endOffset = docsAndPositionsEnum.EndOffset;
 
-                            BytesRef payload = docsAndPositionsEnum.Payload;
+                            BytesRef payload = docsAndPositionsEnum.GetPayload();
 
                             Debug.Assert(!hasPositions || pos >= 0);
                             AddPosition(pos, startOffset, endOffset, payload);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Index/CheckIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/CheckIndex.cs b/src/Lucene.Net.Core/Index/CheckIndex.cs
index 2f3cd0c..fd84f7d 100644
--- a/src/Lucene.Net.Core/Index/CheckIndex.cs
+++ b/src/Lucene.Net.Core/Index/CheckIndex.cs
@@ -1236,7 +1236,7 @@ namespace Lucene.Net.Index
                                     throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " < lastPos " + lastPos);
                                 }
                                 lastPos = pos;
-                                BytesRef payload = postings.Payload;
+                                BytesRef payload = postings.GetPayload();
                                 if (payload != null)
                                 {
                                     Debug.Assert(payload.IsValid());
@@ -2264,7 +2264,7 @@ namespace Lucene.Net.Index
                                                     }
                                                 }
 
-                                                BytesRef payload = postings.Payload;
+                                                BytesRef payload = postings.GetPayload();
 
                                                 if (payload != null)
                                                 {
@@ -2279,20 +2279,20 @@ namespace Lucene.Net.Index
                                                     {
                                                         // we have payloads, but not at this position.
                                                         // postings has payloads too, it should not have one at this position
-                                                        if (postingsPostings.Payload != null)
+                                                        if (postingsPostings.GetPayload() != null)
                                                         {
-                                                            throw new Exception("vector term=" + term + " field=" + field + " doc=" + j + " has no payload but postings does: " + postingsPostings.Payload);
+                                                            throw new Exception("vector term=" + term + " field=" + field + " doc=" + j + " has no payload but postings does: " + postingsPostings.GetPayload());
                                                         }
                                                     }
                                                     else
                                                     {
                                                         // we have payloads, and one at this position
                                                         // postings should also have one at this position, with the same bytes.
-                                                        if (postingsPostings.Payload == null)
+                                                        if (postingsPostings.GetPayload() == null)
                                                         {
                                                             throw new Exception("vector term=" + term + " field=" + field + " doc=" + j + " has payload=" + payload + " but postings does not.");
                                                         }
-                                                        BytesRef postingsPayload = postingsPostings.Payload;
+                                                        BytesRef postingsPayload = postingsPostings.GetPayload();
                                                         if (!payload.Equals(postingsPayload))
                                                         {
                                                             throw new Exception("vector term=" + term + " field=" + field + " doc=" + j + " has payload=" + payload + " but differs from postings payload=" + postingsPayload);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Index/DocsAndPositionsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/DocsAndPositionsEnum.cs b/src/Lucene.Net.Core/Index/DocsAndPositionsEnum.cs
index 5f89448..6decd0b 100644
--- a/src/Lucene.Net.Core/Index/DocsAndPositionsEnum.cs
+++ b/src/Lucene.Net.Core/Index/DocsAndPositionsEnum.cs
@@ -72,6 +72,6 @@ namespace Lucene.Net.Index
         ///  (neither members of the returned BytesRef nor bytes
         ///  in the byte[]).
         /// </summary>
-        public abstract BytesRef Payload { get; } // LUCENENET TODO: Change to GetPayload() ?
+        public abstract BytesRef GetPayload();
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Index/FilterAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/FilterAtomicReader.cs b/src/Lucene.Net.Core/Index/FilterAtomicReader.cs
index 1fdef36..b700d1b 100644
--- a/src/Lucene.Net.Core/Index/FilterAtomicReader.cs
+++ b/src/Lucene.Net.Core/Index/FilterAtomicReader.cs
@@ -354,12 +354,9 @@ namespace Lucene.Net.Index
                 get { return m_input.EndOffset; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return m_input.Payload;
-                }
+                return m_input.GetPayload();
             }
 
             public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Index/MultiDocsAndPositionsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/MultiDocsAndPositionsEnum.cs b/src/Lucene.Net.Core/Index/MultiDocsAndPositionsEnum.cs
index c2dbc0d..a65ae42 100644
--- a/src/Lucene.Net.Core/Index/MultiDocsAndPositionsEnum.cs
+++ b/src/Lucene.Net.Core/Index/MultiDocsAndPositionsEnum.cs
@@ -193,12 +193,9 @@ namespace Lucene.Net.Index
             get { return current.EndOffset; }
         }
 
-        public override BytesRef Payload
+        public override BytesRef GetPayload()
         {
-            get
-            {
-                return current.Payload;
-            }
+            return current.GetPayload();
         }
 
         // TODO: implement bulk read more efficiently than super

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Search/MultiPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/MultiPhraseQuery.cs b/src/Lucene.Net.Core/Search/MultiPhraseQuery.cs
index c90feba..f893fe6 100644
--- a/src/Lucene.Net.Core/Search/MultiPhraseQuery.cs
+++ b/src/Lucene.Net.Core/Search/MultiPhraseQuery.cs
@@ -665,12 +665,9 @@ namespace Lucene.Net.Search
             get { return -1; }
         }
 
-        public override BytesRef Payload
+        public override BytesRef GetPayload()
         {
-            get
-            {
-                return null;
-            }
+            return null;
         }
 
         public override sealed int Advance(int target)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Search/Payloads/PayloadTermQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/Payloads/PayloadTermQuery.cs b/src/Lucene.Net.Core/Search/Payloads/PayloadTermQuery.cs
index b50df07..3f8f4c3 100644
--- a/src/Lucene.Net.Core/Search/Payloads/PayloadTermQuery.cs
+++ b/src/Lucene.Net.Core/Search/Payloads/PayloadTermQuery.cs
@@ -125,7 +125,7 @@ namespace Lucene.Net.Search.Payloads
                     if (termSpans.IsPayloadAvailable)
                     {
                         DocsAndPositionsEnum postings = termSpans.Postings;
-                        m_payload = postings.Payload;
+                        m_payload = postings.GetPayload();
                         if (m_payload != null)
                         {
                             m_payloadScore = outerInstance.outerInstance.m_function.CurrentScore(m_doc, outerInstance.outerInstance.Term.Field, m_spans.Start, m_spans.End, m_payloadsSeen, m_payloadScore, m_docScorer.ComputePayloadFactor(m_doc, m_spans.Start, m_spans.End, m_payload));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/Spans/TermSpans.cs b/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
index 022e521..f72f7bb 100644
--- a/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
+++ b/src/Lucene.Net.Core/Search/Spans/TermSpans.cs
@@ -114,7 +114,7 @@ namespace Lucene.Net.Search.Spans
         // TODO: Remove warning after API has been finalized
         public override ICollection<byte[]> GetPayload()
         {
-            var payload = m_postings.Payload;
+            var payload = m_postings.GetPayload();
             m_readPayload = true;
             byte[] bytes;
             if (payload != null)
@@ -136,7 +136,7 @@ namespace Lucene.Net.Search.Spans
         {
             get
             {
-                return m_readPayload == false && m_postings.Payload != null;
+                return m_readPayload == false && m_postings.GetPayload() != null;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs b/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
index edfbb35..5800ce5 100644
--- a/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs
@@ -241,7 +241,7 @@ namespace Lucene.Net.Search.Highlight
                         // Must make a deep copy of the returned payload,
                         // since D&PEnum API is allowed to re-use on every
                         // call:
-                        token.Payload = BytesRef.DeepCopyOf(dpEnum.Payload);
+                        token.Payload = BytesRef.DeepCopyOf(dpEnum.GetPayload());
                     }
 
                     if (tokenPositionsGuaranteedContiguous && pos != -1)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs b/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
index fec5a35..cbc84e8 100644
--- a/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Search.Highlight
                         // Must make a deep copy of the returned payload,
                         // since D&PEnum API is allowed to re-use on every
                         // call:
-                        token.Payload = BytesRef.DeepCopyOf(dpEnum.Payload);
+                        token.Payload = BytesRef.DeepCopyOf(dpEnum.GetPayload());
                     }
 
                     // Yes - this is the position, not the increment! This is for

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
index d995c88..e5a5bcd 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
@@ -313,16 +313,13 @@ namespace Lucene.Net.Search.PostingsHighlight
             }
 
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                if (matchDescriptions[currentMatch] == null)
                 {
-                    if (matchDescriptions[currentMatch] == null)
-                    {
-                        matchDescriptions[currentMatch] = new BytesRef(matchers[currentMatch].ToString());
-                    }
-                    return matchDescriptions[currentMatch];
+                    matchDescriptions[currentMatch] = new BytesRef(matchers[currentMatch].ToString());
                 }
+                return matchDescriptions[currentMatch];
             }
 
             public override int DocID

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
index 758a33a..a7db1eb 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
@@ -726,7 +726,7 @@ namespace Lucene.Net.Search.PostingsHighlight
                     if (term == null)
                     {
                         // multitermquery match, pull from payload
-                        term = off.dp.Payload;
+                        term = off.dp.GetPayload();
                         Debug.Assert(term != null);
                     }
                     current.AddMatch(start, end, term);
@@ -843,9 +843,9 @@ namespace Lucene.Net.Search.PostingsHighlight
                 get { return int.MaxValue; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get { return null; }
+                return null;
             }
 
             public override int Freq

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
index b6ba603..3553d01 100644
--- a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
+++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
@@ -554,12 +554,9 @@ namespace Lucene.Net.Index.Memory
                     get { return endOffset_Renamed; }
                 }
 
-                public override BytesRef Payload
+                public override BytesRef GetPayload()
                 {
-                    get
-                    {
-                        return null;
-                    }
+                    return null;
                 }
 
                 public override long GetCost()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
index 7bce547..32e7783 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
@@ -688,7 +688,7 @@ namespace Lucene.Net.Index.Sorter
                 for (int i = 0; i < freq; i++)
                 {
                     int pos = @in.NextPosition();
-                    BytesRef payload = @in.Payload;
+                    BytesRef payload = @in.GetPayload();
                     // The low-order bit of token is set only if there is a payload, the
                     // previous bits are the delta-encoded position. 
                     int token = (pos - previousPosition) << 1 | (payload == null ? 0 : 1);
@@ -732,12 +732,9 @@ namespace Lucene.Net.Index.Sorter
                 get { return currFreq; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return payload.Length == 0 ? null : payload;
-                }
+                return payload.Length == 0 ? null : payload;
             }
 
             public override int NextDoc()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
index e0e0d20..01180d3 100644
--- a/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
@@ -630,18 +630,15 @@ namespace Lucene.Net.Codecs.ramonly
                 get { return -1; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
+                if (Current.Payloads != null && Current.Payloads[PosUpto - 1] != null)
                 {
-                    if (Current.Payloads != null && Current.Payloads[PosUpto - 1] != null)
-                    {
-                        return new BytesRef(Current.Payloads[PosUpto - 1]);
-                    }
-                    else
-                    {
-                        return null;
-                    }
+                    return new BytesRef(Current.Payloads[PosUpto - 1]);
+                }
+                else
+                {
+                    return null;
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
index 01cf168..6fcdc14 100644
--- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
@@ -466,17 +466,14 @@ namespace Lucene.Net.Index
                 }
             }
 
-            public override BytesRef Payload
-            {
-                get
-                {
-                    Debug.Assert(State != DocsEnumState.START, "getPayload() called before nextDoc()/advance()");
-                    Debug.Assert(State != DocsEnumState.FINISHED, "getPayload() called after NO_MORE_DOCS");
-                    Debug.Assert(PositionCount > 0, "getPayload() called before nextPosition()!");
-                    BytesRef payload = base.Payload;
-                    Debug.Assert(payload == null || payload.IsValid() && payload.Length > 0, "getPayload() returned payload with invalid length!");
-                    return payload;
-                }
+            public override BytesRef GetPayload()
+            {
+                Debug.Assert(State != DocsEnumState.START, "getPayload() called before nextDoc()/advance()");
+                Debug.Assert(State != DocsEnumState.FINISHED, "getPayload() called after NO_MORE_DOCS");
+                Debug.Assert(PositionCount > 0, "getPayload() called before nextPosition()!");
+                BytesRef payload = base.GetPayload();
+                Debug.Assert(payload == null || payload.IsValid() && payload.Length > 0, "getPayload() returned payload with invalid length!");
+                return payload;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
index c562b7f..4774c67 100644
--- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
@@ -299,12 +299,9 @@ namespace Lucene.Net.Index
                 get { return EndOffset_Renamed; }
             }
 
-            public override BytesRef Payload
+            public override BytesRef GetPayload()
             {
-                get
-                {
-                    return Payload_Renamed.Length == 0 ? null : Payload_Renamed;
-                }
+                return Payload_Renamed.Length == 0 ? null : Payload_Renamed;
             }
 
             public override int Advance(int target)
@@ -601,7 +598,7 @@ namespace Lucene.Net.Index
                             for (int posUpto = 0; posUpto < freq; posUpto++)
                             {
                                 int pos = postings.NextPosition();
-                                BytesRef payload = postings.Payload;
+                                BytesRef payload = postings.GetPayload();
 
                                 if (VERBOSE)
                                 {
@@ -978,7 +975,7 @@ namespace Lucene.Net.Index
 
                         if (doCheckPayloads)
                         {
-                            BytesRef expectedPayload = expected.Payload;
+                            BytesRef expectedPayload = expected.GetPayload();
                             if (Random().NextDouble() <= payloadCheckChance)
                             {
                                 if (VERBOSE)
@@ -987,11 +984,11 @@ namespace Lucene.Net.Index
                                 }
                                 if (expectedPayload == null || expectedPayload.Length == 0)
                                 {
-                                    Assert.IsNull(docsAndPositionsEnum.Payload, "should not have payload");
+                                    Assert.IsNull(docsAndPositionsEnum.GetPayload(), "should not have payload");
                                 }
                                 else
                                 {
-                                    BytesRef payload = docsAndPositionsEnum.Payload;
+                                    BytesRef payload = docsAndPositionsEnum.GetPayload();
                                     Assert.IsNotNull(payload, "should have payload but doesn't");
 
                                     Assert.AreEqual(expectedPayload.Length, payload.Length, "payload length is wrong");
@@ -1002,7 +999,7 @@ namespace Lucene.Net.Index
 
                                     // make a deep copy
                                     payload = BytesRef.DeepCopyOf(payload);
-                                    Assert.AreEqual(payload, docsAndPositionsEnum.Payload, "2nd call to getPayload returns something different!");
+                                    Assert.AreEqual(payload, docsAndPositionsEnum.GetPayload(), "2nd call to getPayload returns something different!");
                                 }
                             }
                             else

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
index e4200ce..b3822d2 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
@@ -621,7 +621,7 @@ namespace Lucene.Net.Index
                                 bool foundPayload = false;
                                 foreach (int index in indexes)
                                 {
-                                    if (tk.TermBytes[index].Equals(termsEnum.Term) && Equals(tk.Payloads[index], docsAndPositionsEnum.Payload))
+                                    if (tk.TermBytes[index].Equals(termsEnum.Term) && Equals(tk.Payloads[index], docsAndPositionsEnum.GetPayload()))
                                     {
                                         foundPayload = true;
                                         break;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
index 1d1cf51..8bc158b 100644
--- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
@@ -2064,7 +2064,7 @@ namespace Lucene.Net.Util
                 for (int i = 0; i < freq; i++)
                 {
                     Assert.AreEqual(leftDocs.NextPosition(), rightDocs.NextPosition(), info);
-                    Assert.AreEqual(leftDocs.Payload, rightDocs.Payload, info);
+                    Assert.AreEqual(leftDocs.GetPayload(), rightDocs.GetPayload(), info);
                     Assert.AreEqual(leftDocs.StartOffset, rightDocs.StartOffset, info);
                     Assert.AreEqual(leftDocs.EndOffset, rightDocs.EndOffset, info);
                 }
@@ -2178,7 +2178,7 @@ namespace Lucene.Net.Util
                 for (int i = 0; i < freq; i++)
                 {
                     Assert.AreEqual(leftDocs.NextPosition(), rightDocs.NextPosition(), info);
-                    Assert.AreEqual(leftDocs.Payload, rightDocs.Payload, info);
+                    Assert.AreEqual(leftDocs.GetPayload(), rightDocs.GetPayload(), info);
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs b/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
index 82720b0..ae11e87 100644
--- a/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
+++ b/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
@@ -575,7 +575,7 @@ namespace Lucene.Net.Index.Memory
                     assertEquals("Position test failed" + failDesc, memPos, pos);
                     assertEquals("Start offset test failed" + failDesc, memDocsPosEnum.StartOffset, docsPosEnum.StartOffset);
                     assertEquals("End offset test failed" + failDesc, memDocsPosEnum.EndOffset, docsPosEnum.EndOffset);
-                    assertEquals("Missing payload test failed" + failDesc, docsPosEnum.Payload, null);
+                    assertEquals("Missing payload test failed" + failDesc, docsPosEnum.GetPayload(), null);
                 }
             }
             assertNull("Still some tokens not processed", memTermEnum.Next());

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
index bd8be54..e1cbe65 100644
--- a/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
+++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs
@@ -262,7 +262,7 @@ namespace Lucene.Net.Index.Sorter
                         assertEquals("incorrect startOffset for doc=" + doc, i, sortedPositions.StartOffset);
                         assertEquals("incorrect endOffset for doc=" + doc, i, sortedPositions.EndOffset);
                     }
-                    assertEquals("incorrect payload for doc=" + doc, freq - i, int.Parse(sortedPositions.Payload.Utf8ToString(), CultureInfo.InvariantCulture));
+                    assertEquals("incorrect payload for doc=" + doc, freq - i, int.Parse(sortedPositions.GetPayload().Utf8ToString(), CultureInfo.InvariantCulture));
                 }
             }
 
@@ -286,7 +286,7 @@ namespace Lucene.Net.Index.Sorter
                         assertEquals("incorrect startOffset for doc=" + doc, i, sortedPositions.StartOffset);
                         assertEquals("incorrect endOffset for doc=" + doc, i, sortedPositions.EndOffset);
                     }
-                    assertEquals("incorrect payload for doc=" + doc, freq - i, int.Parse(sortedPositions.Payload.Utf8ToString(), CultureInfo.InvariantCulture));
+                    assertEquals("incorrect payload for doc=" + doc, freq - i, int.Parse(sortedPositions.GetPayload().Utf8ToString(), CultureInfo.InvariantCulture));
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestCodecs.cs b/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
index 70dc0bc..80e1d87 100644
--- a/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
@@ -609,17 +609,17 @@ namespace Lucene.Net.Index
                     Assert.AreEqual(positions[i].Pos, pos);
                     if (positions[i].Payload != null)
                     {
-                        Assert.IsNotNull(posEnum.Payload);
+                        Assert.IsNotNull(posEnum.GetPayload());
                         if (Random().Next(3) < 2)
                         {
                             // Verify the payload bytes
-                            BytesRef otherPayload = posEnum.Payload;
+                            BytesRef otherPayload = posEnum.GetPayload();
                             Assert.IsTrue(positions[i].Payload.Equals(otherPayload), "expected=" + positions[i].Payload.ToString() + " got=" + otherPayload.ToString());
                         }
                     }
                     else
                     {
-                        Assert.IsNull(posEnum.Payload);
+                        Assert.IsNull(posEnum.GetPayload());
                     }
                 }
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestDocumentWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDocumentWriter.cs b/src/Lucene.Net.Tests/core/Index/TestDocumentWriter.cs
index 4ca08a7..ea81441 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDocumentWriter.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDocumentWriter.cs
@@ -178,11 +178,11 @@ namespace Lucene.Net.Index
             int freq = termPositions.Freq;
             Assert.AreEqual(3, freq);
             Assert.AreEqual(0, termPositions.NextPosition());
-            Assert.IsNotNull(termPositions.Payload);
+            Assert.IsNotNull(termPositions.GetPayload());
             Assert.AreEqual(6, termPositions.NextPosition());
-            Assert.IsNull(termPositions.Payload);
+            Assert.IsNull(termPositions.GetPayload());
             Assert.AreEqual(7, termPositions.NextPosition());
-            Assert.IsNull(termPositions.Payload);
+            Assert.IsNull(termPositions.GetPayload());
             reader.Dispose();
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestLongPostings.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/core/Index/TestLongPostings.cs
index b871d39..0f06912 100644
--- a/src/Lucene.Net.Tests/core/Index/TestLongPostings.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestLongPostings.cs
@@ -251,10 +251,10 @@ namespace Lucene.Net.Index
                                 Assert.AreEqual(pos, postings.NextPosition());
                                 if (Random().NextBoolean())
                                 {
-                                    var dummy = postings.Payload;
+                                    var dummy = postings.GetPayload();
                                     if (Random().NextBoolean())
                                     {
-                                        dummy = postings.Payload; // get it again
+                                        dummy = postings.GetPayload(); // get it again
                                     }
                                 }
                             }
@@ -314,10 +314,10 @@ namespace Lucene.Net.Index
                                 Assert.AreEqual(pos, postings.NextPosition());
                                 if (Random().NextBoolean())
                                 {
-                                    var dummy = postings.Payload;
+                                    var dummy = postings.GetPayload();
                                     if (Random().NextBoolean())
                                     {
-                                        dummy = postings.Payload; // get it again
+                                        dummy = postings.GetPayload(); // get it again
                                     }
                                 }
                             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestMultiLevelSkipList.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestMultiLevelSkipList.cs b/src/Lucene.Net.Tests/core/Index/TestMultiLevelSkipList.cs
index 83aab4c..70a459e 100644
--- a/src/Lucene.Net.Tests/core/Index/TestMultiLevelSkipList.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestMultiLevelSkipList.cs
@@ -118,7 +118,7 @@ namespace Lucene.Net.Index
             Assert.AreEqual(target, tp.DocID, "Wrong document " + tp.DocID + " after skipTo target " + target);
             Assert.AreEqual(1, tp.Freq, "Frequency is not 1: " + tp.Freq);
             tp.NextPosition();
-            BytesRef b = tp.Payload;
+            BytesRef b = tp.GetPayload();
             Assert.AreEqual(1, b.Length);
             Assert.AreEqual((sbyte)target, (sbyte)b.Bytes[b.Offset], "Wrong payload for the target " + target + ": " + (sbyte)b.Bytes[b.Offset]);
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestPayloads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestPayloads.cs b/src/Lucene.Net.Tests/core/Index/TestPayloads.cs
index e1798f6..ec64145 100644
--- a/src/Lucene.Net.Tests/core/Index/TestPayloads.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestPayloads.cs
@@ -212,7 +212,7 @@ namespace Lucene.Net.Index
                     for (int j = 0; j < numTerms; j++)
                     {
                         tps[j].NextPosition();
-                        BytesRef br = tps[j].Payload;
+                        BytesRef br = tps[j].GetPayload();
                         if (br != null)
                         {
                             Array.Copy(br.Bytes, br.Offset, verifyPayloadData, offset, br.Length);
@@ -235,7 +235,7 @@ namespace Lucene.Net.Index
             tp.NextDoc();
             // now we don't read this payload
             tp.NextPosition();
-            BytesRef payload = tp.Payload;
+            BytesRef payload = tp.GetPayload();
             Assert.AreEqual(1, payload.Length, "Wrong payload length.");
             Assert.AreEqual(payload.Bytes[payload.Offset], payloadData[numTerms]);
             tp.NextDoc();
@@ -244,7 +244,7 @@ namespace Lucene.Net.Index
             // we don't read this payload and skip to a different document
             tp.Advance(5);
             tp.NextPosition();
-            payload = tp.Payload;
+            payload = tp.GetPayload();
             Assert.AreEqual(1, payload.Length, "Wrong payload length.");
             Assert.AreEqual(payload.Bytes[payload.Offset], payloadData[5 * numTerms]);
 
@@ -254,16 +254,16 @@ namespace Lucene.Net.Index
             tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[1].Field, new BytesRef(terms[1].Text()));
             tp.NextDoc();
             tp.NextPosition();
-            Assert.AreEqual(1, tp.Payload.Length, "Wrong payload length.");
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
             tp.Advance(skipInterval - 1);
             tp.NextPosition();
-            Assert.AreEqual(1, tp.Payload.Length, "Wrong payload length.");
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
             tp.Advance(2 * skipInterval - 1);
             tp.NextPosition();
-            Assert.AreEqual(1, tp.Payload.Length, "Wrong payload length.");
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
             tp.Advance(3 * skipInterval - 1);
             tp.NextPosition();
-            Assert.AreEqual(3 * skipInterval - 2 * numDocs - 1, tp.Payload.Length, "Wrong payload length.");
+            Assert.AreEqual(3 * skipInterval - 2 * numDocs - 1, tp.GetPayload().Length, "Wrong payload length.");
 
             reader.Dispose();
 
@@ -288,7 +288,7 @@ namespace Lucene.Net.Index
             tp.NextDoc();
             tp.NextPosition();
 
-            BytesRef bref = tp.Payload;
+            BytesRef bref = tp.GetPayload();
             verifyPayloadData = new byte[bref.Length];
             var portion = new byte[1500];
             Array.Copy(payloadData, 100, portion, 0, 1500);
@@ -506,7 +506,7 @@ namespace Lucene.Net.Index
                     for (int i = 0; i < freq; i++)
                     {
                         tp.NextPosition();
-                        BytesRef payload = tp.Payload;
+                        BytesRef payload = tp.GetPayload();
                         Assert.AreEqual(termText, payload.Utf8ToString());
                     }
                 }
@@ -692,7 +692,7 @@ namespace Lucene.Net.Index
             DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
             de.NextDoc();
             de.NextPosition();
-            Assert.AreEqual(new BytesRef("test"), de.Payload);
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
             writer.Dispose();
             reader.Dispose();
             dir.Dispose();
@@ -729,7 +729,7 @@ namespace Lucene.Net.Index
             DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
             de.NextDoc();
             de.NextPosition();
-            Assert.AreEqual(new BytesRef("test"), de.Payload);
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
             writer.Dispose();
             reader.Dispose();
             dir.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestPayloadsOnVectors.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestPayloadsOnVectors.cs b/src/Lucene.Net.Tests/core/Index/TestPayloadsOnVectors.cs
index 81d8863..7e26232 100644
--- a/src/Lucene.Net.Tests/core/Index/TestPayloadsOnVectors.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestPayloadsOnVectors.cs
@@ -84,7 +84,7 @@ namespace Lucene.Net.Index
             DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
             Assert.AreEqual(0, de.NextDoc());
             Assert.AreEqual(0, de.NextPosition());
-            Assert.AreEqual(new BytesRef("test"), de.Payload);
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
             writer.Dispose();
             reader.Dispose();
             dir.Dispose();
@@ -129,7 +129,7 @@ namespace Lucene.Net.Index
             DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
             Assert.AreEqual(0, de.NextDoc());
             Assert.AreEqual(3, de.NextPosition());
-            Assert.AreEqual(new BytesRef("test"), de.Payload);
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
             writer.Dispose();
             reader.Dispose();
             dir.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ace7809/src/Lucene.Net.Tests/core/Index/TestPostingsOffsets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/core/Index/TestPostingsOffsets.cs
index f09a50c..bcb30f3 100644
--- a/src/Lucene.Net.Tests/core/Index/TestPostingsOffsets.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestPostingsOffsets.cs
@@ -184,8 +184,8 @@ namespace Lucene.Net.Index
                         if (withPayloads)
                         {
                             // check that we have a payload and it starts with "pos"
-                            Assert.IsNotNull(dp.Payload);
-                            BytesRef payload = dp.Payload;
+                            Assert.IsNotNull(dp.GetPayload());
+                            BytesRef payload = dp.GetPayload();
                             Assert.IsTrue(payload.Utf8ToString().StartsWith("pos:"));
                         } // note: withPayloads=false doesnt necessarily mean we dont have them from MockAnalyzer!
                     }
@@ -215,8 +215,8 @@ namespace Lucene.Net.Index
                     if (withPayloads)
                     {
                         // check that we have a payload and it starts with "pos"
-                        Assert.IsNotNull(dp.Payload);
-                        BytesRef payload = dp.Payload;
+                        Assert.IsNotNull(dp.GetPayload());
+                        BytesRef payload = dp.GetPayload();
                         Assert.IsTrue(payload.Utf8ToString().StartsWith("pos:"));
                     } // note: withPayloads=false doesnt necessarily mean we dont have them from MockAnalyzer!
                 }


[27/27] lucenenet git commit: Lucene.Net.Core.Util.NumericUtils: Changed SHIFT_START_INT from sbyte to byte, since it is converted to byte anyway, and stores a positive value.

Posted by ni...@apache.org.
Lucene.Net.Core.Util.NumericUtils: Changed SHIFT_START_INT from sbyte to byte, since it is converted to byte anyway, and stores a positive value.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/f71dfb40
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/f71dfb40
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/f71dfb40

Branch: refs/heads/api-work
Commit: f71dfb400d61ac8e59056ff5db076b6f26fc13f1
Parents: 7cb6064
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 19:35:14 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 19:35:14 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/NumericUtils.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f71dfb40/src/Lucene.Net.Core/Util/NumericUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/NumericUtils.cs b/src/Lucene.Net.Core/Util/NumericUtils.cs
index a013102..894e33b 100644
--- a/src/Lucene.Net.Core/Util/NumericUtils.cs
+++ b/src/Lucene.Net.Core/Util/NumericUtils.cs
@@ -89,7 +89,7 @@ namespace Lucene.Net.Util
         /// Integers are stored at lower precision by shifting off lower bits. The shift count is
         /// stored as <code>SHIFT_START_INT+shift</code> in the first byte
         /// </summary>
-        public const sbyte SHIFT_START_INT = 0x60;
+        public const byte SHIFT_START_INT = 0x60;
 
         /// <summary>
         /// The maximum term length (used for <code>byte[]</code> buffer size)


[14/27] lucenenet git commit: Lucene.Net.Suggest: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Suggest: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bcc0d170
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bcc0d170
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bcc0d170

Branch: refs/heads/api-work
Commit: bcc0d170fc8d49b963bfd1d6b7d007ab20d5a344
Parents: c95f6ab
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 13:56:49 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:55 2017 +0700

----------------------------------------------------------------------
 .../Spell/DirectSpellChecker.cs                   | 18 +++++++++---------
 src/Lucene.Net.Suggest/Spell/SpellChecker.cs      | 14 +++++++-------
 .../Suggest/Analyzing/AnalyzingSuggester.cs       |  6 +++---
 .../Suggest/Analyzing/FreeTextSuggester.cs        |  4 ++--
 .../Suggest/Analyzing/FuzzySuggester.cs           | 12 ++++++------
 .../Suggest/DocumentDictionary.cs                 |  2 +-
 .../Suggest/Fst/FSTCompletion.cs                  | 16 ++++++++--------
 .../Suggest/Fst/FSTCompletionBuilder.cs           |  2 +-
 .../Suggest/Fst/FSTCompletionLookup.cs            |  2 +-
 .../Suggest/Fst/WFSTCompletionLookup.cs           |  2 +-
 .../Suggest/Jaspell/JaspellTernarySearchTrie.cs   | 17 ++++++-----------
 11 files changed, 45 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs b/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
index 433a502..e808424 100644
--- a/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
+++ b/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs
@@ -180,12 +180,12 @@ namespace Lucene.Net.Search.Spell
 
         /// <summary>
         /// Gets or sets the minimal threshold of documents a term must appear for a match.
-        /// <p>
+        /// <para/>
         /// This can improve quality by only suggesting high-frequency terms. Note that
         /// very high values might decrease performance slightly, by forcing the spellchecker
         /// to draw more candidates from the term dictionary, but a practical value such
-        /// as <code>1</code> can be very useful towards improving quality.
-        /// <p>
+        /// as <c>1</c> can be very useful towards improving quality.
+        /// <para/>
         /// This can be specified as a relative percentage of documents such as 0.5f,
         /// or it can be specified as an absolute whole document frequency, such as 4f.
         /// Absolute document frequencies may not be fractional.
@@ -209,7 +209,7 @@ namespace Lucene.Net.Search.Spell
 
         /// <summary>
         /// Gets or sets the minimum length of a query term (default: 4) needed to return suggestions.
-        /// <p>
+        /// <para/>
         /// Very short query terms will often cause only bad suggestions with any distance
         /// metric.
         /// </summary>
@@ -229,11 +229,11 @@ namespace Lucene.Net.Search.Spell
         /// <summary>
         /// Gets or sets the maximum threshold (default: 0.01f) of documents a query term can 
         /// appear in order to provide suggestions.
-        /// <p>
+        /// <para/>
         /// Very high-frequency terms are typically spelled correctly. Additionally,
         /// this can increase performance as it will do no work for the common case
         /// of correctly-spelled input terms.
-        /// <p>
+        /// <para/>
         /// This can be specified as a relative percentage of documents such as 0.5f,
         /// or it can be specified as an absolute whole document frequency, such as 4f.
         /// Absolute document frequencies may not be fractional.
@@ -257,12 +257,12 @@ namespace Lucene.Net.Search.Spell
 
         /// <summary>
         /// True if the spellchecker should lowercase terms (default: true)
-        /// <p>
+        /// <para/>
         /// This is a convenience method, if your index field has more complicated
         /// analysis (such as StandardTokenizer removing punctuation), its probably
         /// better to turn this off, and instead run your query terms through your
         /// Analyzer first.
-        /// <p>
+        /// <para/>
         /// If this option is not on, case differences count as an edit!
         /// </summary>
         public virtual bool LowerCaseTerms
@@ -298,7 +298,7 @@ namespace Lucene.Net.Search.Spell
         /// <summary>
         /// Gets or sets the string distance metric.
         /// The default is <see cref="INTERNAL_LEVENSHTEIN"/>.
-        /// <p>
+        /// <para/>
         /// Note: because this spellchecker draws its candidates from the term
         /// dictionary using Damerau-Levenshtein, it works best with an edit-distance-like
         /// string metric. If you use a different metric than the default,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
index 76b02e6..801488d 100644
--- a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
+++ b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
@@ -226,7 +226,7 @@ namespace Lucene.Net.Search.Spell
         /// </summary>
         /// <param name="word"> the word you want a spell check done on </param>
         /// <param name="numSug"> the number of suggested words </param>
-        /// <exception cref="System.IO.IOException"> if the underlying index throws an <see cref="IOException"/> </exception>
+        /// <exception cref="System.IO.IOException"> if the underlying index throws an <see cref="System.IO.IOException"/> </exception>
         /// <exception cref="AlreadyClosedException"> if the Spellchecker is already disposed </exception>
         /// <returns>string[] the sorted list of the suggest words with these 2 criteria:
         /// first criteria: the edit distance, second criteria (only if restricted mode): the popularity
@@ -253,7 +253,7 @@ namespace Lucene.Net.Search.Spell
         /// <param name="word"> the word you want a spell check done on </param>
         /// <param name="numSug"> the number of suggested words </param>
         /// <param name="accuracy"> The minimum score a suggestion must have in order to qualify for inclusion in the results </param>
-        /// <exception cref="System.IO.IOException"> if the underlying index throws an <see cref="IOException"/> </exception>
+        /// <exception cref="System.IO.IOException"> if the underlying index throws an <see cref="System.IO.IOException"/> </exception>
         /// <exception cref="AlreadyClosedException"> if the Spellchecker is already disposed </exception>
         /// <returns>string[] the sorted list of the suggest words with these 2 criteria:
         /// first criteria: the edit distance, second criteria (only if restricted mode): the popularity
@@ -295,8 +295,8 @@ namespace Lucene.Net.Search.Spell
         /// <param name="suggestMode"> 
         /// (NOTE: if indexReader==null and/or field==null, then this is overridden with SuggestMode.SUGGEST_ALWAYS) </param>
         /// <param name="accuracy"> The minimum score a suggestion must have in order to qualify for inclusion in the results </param>
-        /// <exception cref="System.IO.IOException"> if the underlying index throws an <see cref="IOException"/> </exception>
-        /// <exception cref="AlreadyClosedException"> if the <see cref="Spellchecker"/> is already disposed </exception>
+        /// <exception cref="System.IO.IOException"> if the underlying index throws an <see cref="System.IO.IOException"/> </exception>
+        /// <exception cref="AlreadyClosedException"> if the <see cref="SpellChecker"/> is already disposed </exception>
         /// <returns> string[] the sorted list of the suggest words with these 2 criteria:
         /// first criteria: the edit distance, second criteria (only if restricted mode): the popularity
         /// of the suggest words in the field of the user index
@@ -475,7 +475,7 @@ namespace Lucene.Net.Search.Spell
         /// Check whether the word exists in the index. </summary>
         /// <param name="word"> word to check </param>
         /// <exception cref="System.IO.IOException"> If there is a low-level I/O error. </exception>
-        /// <exception cref="AlreadyClosedException"> if the <see cref="Spellchecker"/> is already disposed </exception>
+        /// <exception cref="AlreadyClosedException"> if the <see cref="SpellChecker"/> is already disposed </exception>
         /// <returns> true if the word exists in the index </returns>
         public virtual bool Exist(string word)
         {
@@ -498,7 +498,7 @@ namespace Lucene.Net.Search.Spell
         /// <param name="dict"> Dictionary to index </param>
         /// <param name="config"> <see cref="IndexWriterConfig"/> to use </param>
         /// <param name="fullMerge"> whether or not the spellcheck index should be fully merged </param>
-        /// <exception cref="AlreadyClosedException"> if the <see cref="Spellchecker"/> is already disposed </exception>
+        /// <exception cref="AlreadyClosedException"> if the <see cref="SpellChecker"/> is already disposed </exception>
         /// <exception cref="System.IO.IOException"> If there is a low-level I/O error. </exception>
         public void IndexDictionary(IDictionary dict, IndexWriterConfig config, bool fullMerge)
         {
@@ -674,7 +674,7 @@ namespace Lucene.Net.Search.Spell
 
         /// <summary>
         /// Dispose the underlying IndexSearcher used by this SpellChecker </summary>
-        /// <exception cref="System.IO.IOException"> if the close operation causes an <see cref="IOException"/> </exception>
+        /// <exception cref="System.IO.IOException"> if the close operation causes an <see cref="System.IO.IOException"/> </exception>
         /// <exception cref="AlreadyClosedException"> if the <see cref="SpellChecker"/> is already disposed </exception>
         public void Dispose()
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
index dd163ac..8eef805 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
     /// suggestion "The Ghost of Christmas Past". Note that
     /// position increments MUST NOT be preserved for this example
     /// to work, so you should call the constructor with 
-    /// <paramref name="preservePositionIncrements"/> parameter set to 
+    /// <see cref="preservePositionIncrements"/> parameter set to 
     /// false
     /// 
     /// </para>
@@ -164,7 +164,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         private long count = 0;
 
         /// <summary>
-        /// Calls <see cref="AnalyzingSuggester(Analyzer,Analyzer,Options,int,int,bool)">
+        /// Calls <see cref="AnalyzingSuggester(Analyzer, Analyzer, SuggesterOptions, int, int, bool)">
         /// AnalyzingSuggester(analyzer, analyzer, SuggesterOptions.EXACT_FIRST | SuggesterOptions.PRESERVE_SEP, 256, -1, true)
         /// </see>
         /// </summary>
@@ -174,7 +174,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         }
 
         /// <summary>
-        /// Calls <see cref="AnalyzingSuggester(Analyzer,Analyzer,Options,int,int,bool)">
+        /// Calls <see cref="AnalyzingSuggester(Analyzer,Analyzer,SuggesterOptions,int,int,bool)">
         /// AnalyzingSuggester(indexAnalyzer, queryAnalyzer, SuggesterOptions.EXACT_FIRST | SuggesterOptions.PRESERVE_SEP, 256, -1, true)
         /// </see>
         /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
index 18242df..0a29651 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
     /// point, cast to long).  Divide by <see cref="long.MaxValue"/> to get
     /// the score back, which ranges from 0.0 to 1.0.
     /// 
-    /// <paramref name="onlyMorePopular"/> is unused.
+    /// <c>onlyMorePopular</c> is unused.
     /// 
     /// @lucene.experimental
     /// </para>
@@ -166,7 +166,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         /// <summary>
         /// Instantiate, using the provided indexing and lookup
         /// analyzers, and specified model (2 = bigram, 3 =
-        /// trigram ,etc.).  The separator is passed to <see cref="ShingleFilter.TokenSeparator"/>
+        /// trigram ,etc.).  The <paramref name="separator"/> is passed to <see cref="ShingleFilter.SetTokenSeparator(string)"/>
         /// to join multiple
         /// tokens into a single ngram token; it must be an ascii
         /// (7-bit-clean) byte.  No input tokens should have this

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
index dac0de3..5f202fb 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FuzzySuggester.cs
@@ -29,12 +29,12 @@ namespace Lucene.Net.Search.Suggest.Analyzing
     /// Implements a fuzzy <see cref="AnalyzingSuggester"/>. The similarity measurement is
     /// based on the Damerau-Levenshtein (optimal string alignment) algorithm, though
     /// you can explicitly choose classic Levenshtein by passing <c>false</c>
-    /// for the <paramref name="transpositions"/> parameter.
+    /// for the <see cref="transpositions"/> parameter.
     /// <para>
     /// At most, this query will match terms up to <see cref="LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE"/>
     /// edits. Higher distances are not supported.  Note that the
     /// fuzzy distance is measured in "byte space" on the bytes
-    /// returned by the <see cref="TokenStream"/>'s <see cref="Analysis.Tokenattributes.ITermToBytesRefAttribute"/>, 
+    /// returned by the <see cref="TokenStream"/>'s <see cref="Analysis.TokenAttributes.ITermToBytesRefAttribute"/>, 
     /// usually UTF8.  By default
     /// the analyzed bytes must be at least 3 <see cref="DEFAULT_MIN_FUZZY_LENGTH"/>
     /// bytes before any edits are
@@ -42,7 +42,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
     /// byte is not allowed to be
     /// edited.  We allow up to 1 <see cref="DEFAULT_MAX_EDITS"/>
     /// edit.
-    /// If <paramref name="unicodeAware"/> parameter in the constructor is set to true, maxEdits,
+    /// If <see cref="unicodeAware"/> parameter in the constructor is set to true, maxEdits,
     /// minFuzzyLength, transpositions and nonFuzzyPrefix are measured in Unicode code 
     /// points (actual letters) instead of bytes. 
     /// 
@@ -71,8 +71,8 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         private readonly bool unicodeAware;
 
         /// <summary>
-        /// Measure <paramref name="maxEdits"/>, <paramref name="minFuzzyLength"/>, 
-        /// <paramref name="transpositions"/>, and <paramref name="nonFuzzyPrefix"/> 
+        /// Measure <see cref="maxEdits"/>, <see cref="minFuzzyLength"/>, 
+        /// <see cref="transpositions"/>, and <see cref="nonFuzzyPrefix"/> 
         /// parameters in Unicode code points (actual letters)
         /// instead of bytes.
         /// </summary>
@@ -110,7 +110,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         }
 
         /// <summary>
-        /// Creates a <see cref="FuzzySuggester"/> instance with an index & a query analyzer initialized with default values.
+        /// Creates a <see cref="FuzzySuggester"/> instance with an index &amp; a query analyzer initialized with default values.
         /// </summary>
         /// <param name="indexAnalyzer">
         ///           <see cref="Analyzer"/> that will be used for analyzing suggestions while building the index. </param>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs b/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
index f70c0d2..1688ddc 100644
--- a/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
+++ b/src/Lucene.Net.Suggest/Suggest/DocumentDictionary.cs
@@ -87,7 +87,7 @@ namespace Lucene.Net.Search.Suggest
         /// Creates a new dictionary with the contents of the fields named <paramref name="field"/>
         /// for the terms, <paramref name="weightField"/> for the weights that will be used for the 
         /// the corresponding terms, <paramref name="payloadField"/> for the corresponding payloads
-        /// for the entry and <paramref name="contextsFeild"/> for associated contexts.
+        /// for the entry and <paramref name="contextsField"/> for associated contexts.
         /// </summary>
         public DocumentDictionary(IndexReader reader, string field, string weightField, string payloadField, string contextsField)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
index 792b682..5c5edd6 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs
@@ -64,7 +64,7 @@ namespace Lucene.Net.Search.Suggest.Fst
                 return Utf8.Utf8ToString() + "/" + Bucket.ToString("0.0", CultureInfo.InvariantCulture);
             }
 
-            /// <seealso cref="BytesRef.CompareTo(BytesRef)"></seealso>
+            /// <seealso cref="BytesRef.CompareTo(object)"></seealso>
             public int CompareTo(Completion o)
             {
                 return this.Utf8.CompareTo(o.Utf8);
@@ -77,7 +77,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         public const int DEFAULT_BUCKETS = 10;
 
         /// <summary>
-        /// An empty result. Keep this an <see cref="List"/> to keep all the returned
+        /// An empty result. Keep this an <see cref="List{T}"/> to keep all the returned
         /// lists of single type (monomorphic calls).
         /// </summary>
         private static readonly List<Completion> EMPTY_RESULT = new List<Completion>();
@@ -94,10 +94,10 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// </summary>
         private readonly FST.Arc<object>[] rootArcs;
 
-        /// <seealso cref="FSTCompletion(FST, bool, bool)" />
+        /// <seealso cref="FSTCompletion(FST{object}, bool, bool)" />
         private readonly bool exactFirst;
 
-        /// <seealso cref="FSTCompletion(FST, bool, bool)" />
+        /// <seealso cref="FSTCompletion(FST{object}, bool, bool)" />
         private readonly bool higherWeightsFirst;
 
         // LUCENENET SPECIFIC: We need some thread safety to execute atomic list operations
@@ -107,7 +107,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// Constructs an FSTCompletion, specifying higherWeightsFirst and exactFirst. </summary>
         /// <param name="automaton">
         ///          Automaton with completions. See <see cref="FSTCompletionBuilder"/>. </param>
-        /// <param name="exactFirst">
+        /// <param name="higherWeightsFirst">
         ///          Return most popular suggestions first. This is the default
         ///          behavior for this implementation. Setting it to <c>false</c>
         ///          has no effect (use constant term weights to sort alphabetically
@@ -132,7 +132,7 @@ namespace Lucene.Net.Search.Suggest.Fst
 
         /// <summary>
         /// Defaults to higher weights first and exact first. </summary>
-        /// <seealso cref="FSTCompletion(FST, bool, bool)"/>
+        /// <seealso cref="FSTCompletion(FST{object}, bool, bool)"/>
         public FSTCompletion(FST<object> automaton)
             : this(automaton, true, true)
         {
@@ -339,7 +339,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// position.
         /// </summary>
         /// <returns> 
-        /// Returns <c>true<c> if and only if <paramref name="list"/> contained
+        /// Returns <c>true</c> if and only if <paramref name="list"/> contained
         /// <paramref name="key"/>.
         /// </returns>
         private bool CheckExistingAndReorder(IList<Completion> list, BytesRef key)
@@ -376,7 +376,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// <param name="utf8">
         ///          The term to descend along. </param>
         /// <returns> If <c>true</c>, <paramref name="arc"/> will be set to the arc
-        ///         matching last byte of <paramref name="term"/>. <c>false</c> is
+        ///         matching last byte of <c>term</c>. <c>false</c> is
         ///         returned if no such prefix exists. </returns>
         private bool DescendWithPrefix(FST.Arc<object> arc, BytesRef utf8)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
index 6652b9d..7bac69c 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
@@ -28,7 +28,7 @@ namespace Lucene.Net.Search.Suggest.Fst
     /// <h2>Implementation details</h2>
     /// 
     /// <para>
-    /// The construction step in <see cref="Finalize"/> works as follows:
+    /// The construction step in the object finalizer works as follows:
     /// <list type="bullet">
     /// <item>A set of input terms and their buckets is given.</item>
     /// <item>All terms in the input are prefixed with a synthetic pseudo-character

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
index bc0da9d..7d30394 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// An invalid bucket count if we're creating an object
         /// of this class from an existing FST.
         /// </summary>
-        /// <seealso cref="FSTCompletionLookup(FSTCompletion, bool)"/> </seealso>
+        /// <seealso cref="FSTCompletionLookup(FSTCompletion, bool)"/>
         private static int INVALID_BUCKETS_COUNT = -1;
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
index fd4ac67..72e0bef 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Search.Suggest.Fst
     {
 
         /// <summary>
-        /// FST<Long>, weights are encoded as costs: (Integer.MAX_VALUE-weight)
+        /// FST{long?}, weights are encoded as costs: (int.MaxValue-weight)
         /// </summary>
         // NOTE: like FSTSuggester, this is really a WFSA, if you want to
         // customize the code to add some output you should use PairOutputs.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcc0d170/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
index 4d4568e..29e5276 100644
--- a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
     /// Implementation of a Ternary Search Trie, a data structure for storing
     /// <see cref="string"/>s that combines the compact size of a binary search
     /// tree with the speed of a digital search trie, and is therefore ideal for
-    /// practical use in sorting and searching data.</p>
+    /// practical use in sorting and searching data.
     /// <para>
     /// 
     /// This data structure is faster than hashing for many typical search problems,
@@ -87,6 +87,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
             /// <summary>
             /// Constructor method.
             /// </summary>
+            /// <param name="outerInstance">The containing <see cref="JaspellTernarySearchTrie"/></param>
             /// <param name="splitchar">
             ///          The char used in the split. </param>
             /// <param name="parent">
@@ -174,7 +175,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
 
         /// <summary>
         /// the number of differences allowed in a call to the <see cref="MatchAlmost"/>
-        /// <paramref cref="key"/>.
+        /// <c>key</c>.
         /// </summary>
         private int matchAlmostDiff;
 
@@ -497,12 +498,11 @@ namespace Lucene.Net.Search.Suggest.Jaspell
         }
 
         /// <summary>
-        /// Retrieve the <see cref="System.Nullable{float}"/> indexed by key, increment it by one unit
-        /// and store the new <see cref="System.Nullable{float}"/>.
+        /// Retrieve the <see cref="T:float?"/> indexed by key, increment it by one unit
+        /// and store the new <see cref="T:float?"/>.
         /// </summary>
         /// <param name="key"> A <see cref="string"/> index. </param>
-        /// <param name="culture">The culture used for lowercasing.</param>
-        /// <returns> The <see cref="System.Nullable{float}"/> retrieved from the Ternary Search Trie. </returns>
+        /// <returns> The <see cref="T:float?"/> retrieved from the Ternary Search Trie. </returns>
         public virtual float? GetAndIncrement(string key)
         {
             string key2 = culture.TextInfo.ToLower(key.Trim());
@@ -906,9 +906,6 @@ namespace Lucene.Net.Search.Suggest.Jaspell
         /// 
         /// </para>
         /// </summary>
-        /// <param name="diff">
-        ///          The number of characters by which words can differ from target
-        ///          word. </param>
         public virtual int MatchAlmostDiff
         {
             get // LUCENENET NOTE: Added property get per MSDN guidelines
@@ -942,8 +939,6 @@ namespace Lucene.Net.Search.Suggest.Jaspell
         /// case this value is temporarily overridden.
         /// </para>
         /// </summary>
-        /// <param name="num"> The number of values that will be returned when calling the
-        ///          methods above. </param>
         public virtual int NumReturnValues
         {
             get // LUCENENET NOTE: Added property get per MSDN guidelines


[21/27] lucenenet git commit: Lucene.Net.Core.Util.FieldCacheSanityChecker.ReaderField refactor: Changed to use RuntimeHelpers.GetHashCode(), which is the equivalent of Java's System.identityHashCode() method

Posted by ni...@apache.org.
Lucene.Net.Core.Util.FieldCacheSanityChecker.ReaderField refactor: Changed to use RuntimeHelpers.GetHashCode(), which is the equivalent of Java's System.identityHashCode() method


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/efd894b8
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/efd894b8
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/efd894b8

Branch: refs/heads/api-work
Commit: efd894b8a7bb046707afd8e2fb92ffe54abf7b26
Parents: 460e55e
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 18:23:31 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 18:23:31 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/efd894b8/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs b/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
index 8a0fc7f..cbb93ef 100644
--- a/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
+++ b/src/Lucene.Net.Core/Util/FieldCacheSanityChecker.cs
@@ -2,7 +2,7 @@ using Lucene.Net.Search;
 using Lucene.Net.Support;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
-using System.Linq;
+using System.Runtime.CompilerServices;
 using System.Text;
 
 namespace Lucene.Net.Util
@@ -322,18 +322,22 @@ namespace Lucene.Net.Util
         /// </summary>
         private sealed class ReaderField
         {
-            public object ReaderKey { get; private set; }
+            public object ReaderKey
+            {
+                get { return readerKey; }
+            }
+            private readonly object readerKey;
             public string FieldName { get; private set; }
 
             public ReaderField(object readerKey, string fieldName)
             {
-                this.ReaderKey = readerKey;
+                this.readerKey = readerKey;
                 this.FieldName = fieldName;
             }
 
             public override int GetHashCode()
             {
-                return ReaderKey.GetHashCode() * FieldName.GetHashCode(); // LUCENENET TODO: IdentityHashCode
+                return RuntimeHelpers.GetHashCode(readerKey) * FieldName.GetHashCode();
             }
 
             public override bool Equals(object that)
@@ -344,12 +348,12 @@ namespace Lucene.Net.Util
                 }
 
                 ReaderField other = (ReaderField)that;
-                return (this.ReaderKey == other.ReaderKey && this.FieldName.Equals(other.FieldName));
+                return (this.readerKey == other.readerKey && this.FieldName.Equals(other.FieldName));
             }
 
             public override string ToString()
             {
-                return ReaderKey.ToString() + "+" + FieldName;
+                return readerKey.ToString() + "+" + FieldName;
             }
         }
 


[17/27] lucenenet git commit: Lucene.Net.Core.Support.Number: Removed bogus Signum() overload that was code duplication from sbyte.CompareTo(), long.CompareTo() and int.CompareTo() methods in .NET

Posted by ni...@apache.org.
Lucene.Net.Core.Support.Number: Removed bogus Signum() overload that was code duplication from sbyte.CompareTo(), long.CompareTo() and int.CompareTo() methods in .NET


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bab4adde
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bab4adde
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bab4adde

Branch: refs/heads/api-work
Commit: bab4adde0e689d4f463dbcff998a0bbe9bebec35
Parents: f383920
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 17:23:36 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 17:23:36 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Search/FieldComparator.cs | 29 ++++++++++++----------
 src/Lucene.Net.Core/Support/Number.cs         | 16 ------------
 2 files changed, 16 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bab4adde/src/Lucene.Net.Core/Search/FieldComparator.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/FieldComparator.cs b/src/Lucene.Net.Core/Search/FieldComparator.cs
index 0019688..06e04b3 100644
--- a/src/Lucene.Net.Core/Search/FieldComparator.cs
+++ b/src/Lucene.Net.Core/Search/FieldComparator.cs
@@ -374,8 +374,8 @@ namespace Lucene.Net.Search
 
             public override int Compare(int slot1, int slot2)
             {
-                //LUCENE TO-DO
-                return Number.Signum(values[slot1], values[slot2]);
+                // LUCENENET NOTE: Same logic as the Byte.compare() method in Java
+                return values[slot1].CompareTo(values[slot2]);
             }
 
             public override int CompareBottom(int doc)
@@ -387,8 +387,8 @@ namespace Lucene.Net.Search
                 {
                     v2 = m_missingValue.GetValueOrDefault();
                 }
-                //LUCENE TO-DO
-                return Number.Signum(bottom, v2);
+                // LUCENENET NOTE: Same logic as the Byte.compare() method in Java
+                return bottom.CompareTo(v2);
             }
 
             public override void Copy(int slot, int doc)
@@ -435,8 +435,8 @@ namespace Lucene.Net.Search
                 {
                     docValue = m_missingValue.GetValueOrDefault();
                 }
-                //LUCENE TO-DO
-                return Number.Signum(topValue, docValue);
+                // LUCENENET NOTE: Same logic as the Byte.compare() method in Java
+                return topValue.CompareTo(docValue);
             }
         }
 
@@ -637,8 +637,8 @@ namespace Lucene.Net.Search
 
             public override int Compare(int slot1, int slot2)
             {
-                //LUCENE TO-DO
-                return Number.Signum(values[slot1], values[slot2]);
+                // LUCENENET NOTE: Same logic as the Byte.compare() method in Java
+                return values[slot1].CompareTo(values[slot2]);
             }
 
             public override int CompareBottom(int doc)
@@ -651,8 +651,8 @@ namespace Lucene.Net.Search
                     v2 = m_missingValue.GetValueOrDefault();
                 }
 
-                //LUCENE TO-DO
-                return Number.Signum(bottom, v2);
+                // LUCENENET NOTE: Same logic as the Byte.compare() method in Java
+                return bottom.CompareTo(v2);
             }
 
             public override void Copy(int slot, int doc)
@@ -812,7 +812,8 @@ namespace Lucene.Net.Search
 
             public override int Compare(int slot1, int slot2)
             {
-                return Number.Signum(values[slot1], values[slot2]);
+                // LUCENENET NOTE: Same logic as the Long.compare() method in Java
+                return values[slot1].CompareTo(values[slot2]);
             }
 
             public override int CompareBottom(int doc)
@@ -827,7 +828,8 @@ namespace Lucene.Net.Search
                     v2 = m_missingValue.GetValueOrDefault();
                 }
 
-                return Number.Signum(bottom, v2);
+                // LUCENENET NOTE: Same logic as the Long.compare() method in Java
+                return bottom.CompareTo(v2);
             }
 
             public override void Copy(int slot, int doc)
@@ -1026,7 +1028,8 @@ namespace Lucene.Net.Search
             public override int CompareTop(int doc)
             {
                 int docValue = docBase + doc;
-                return Number.Signum(topValue, docValue);
+                // LUCENENET NOTE: Same logic as the Integer.compare() method in Java
+                return topValue.CompareTo(docValue);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bab4adde/src/Lucene.Net.Core/Support/Number.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/Number.cs b/src/Lucene.Net.Core/Support/Number.cs
index 7201afe..d5f6ab0 100644
--- a/src/Lucene.Net.Core/Support/Number.cs
+++ b/src/Lucene.Net.Core/Support/Number.cs
@@ -361,22 +361,6 @@ namespace Lucene.Net.Support
             return a == 0 ? 0 : (int)(a / Math.Abs(a));
         }
 
-        public static int Signum(long a, long b)
-        {
-            if (a < b)
-            {
-                return -1;
-            }
-            else if (a > b)
-            {
-                return 1;
-            }
-            else
-            {
-                return 0;
-            }
-        }
-
         // Returns the number of 1-bits in the number
         public static int BitCount(int num)
         {


[09/27] lucenenet git commit: Lucene.Net.Misc: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Misc: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/b163f89d
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/b163f89d
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/b163f89d

Branch: refs/heads/api-work
Commit: b163f89de6d347028ba114134769bd6d44b7bfe8
Parents: 66d768a
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 12:42:47 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:51 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs    | 14 ++++++--------
 src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs   |  1 +
 .../Index/Sorter/EarlyTerminatingSortingCollector.cs  |  2 +-
 src/Lucene.Net.Misc/Index/Sorter/Sorter.cs            |  2 +-
 src/Lucene.Net.Misc/Misc/HighFreqTerms.cs             |  2 +-
 src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs         |  6 +++---
 .../Util/Fst/UpToTwoPositiveIntOutputs.cs             |  2 +-
 7 files changed, 14 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs b/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs
index 2d2ee01..80ced50 100644
--- a/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs
+++ b/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs
@@ -23,18 +23,16 @@ namespace Lucene.Net.Index
 	 */
 
     /// <summary>
-    /// Prints the filename and size of each file within a given compound file.
-    /// Add the -extract flag to extract files to the current working directory.
-    /// In order to make the extracted version of the index work, you have to copy
-    /// the segments file from the compound index into the directory where the extracted files are stored. </summary>
-    /// <param name="args"> Usage: org.apache.lucene.index.IndexReader [-extract] &lt;cfsfile&gt; </param>
-
-    /// <summary>
     /// Command-line tool for extracting sub-files out of a compound file.
     /// </summary>
     public class CompoundFileExtractor
     {
-
+        /// <summary>
+        /// Prints the filename and size of each file within a given compound file.
+        /// Add the -extract flag to extract files to the current working directory.
+        /// In order to make the extracted version of the index work, you have to copy
+        /// the segments file from the compound index into the directory where the extracted files are stored. </summary>
+        /// <param name="args"> Usage: org.apache.lucene.index.IndexReader [-extract] &lt;cfsfile&gt; </param>
         public static void Main(string[] args)
         {
             string filename = null;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
index 28c54c7..a8dff93 100644
--- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
+++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
@@ -49,6 +49,7 @@ namespace Lucene.Net.Index
 
         /// <summary>
         /// Split source index into multiple parts. </summary>
+        /// <param name="version">lucene compatibility version</param>
         /// <param name="in"> source index, can have deletions, can have
         /// multiple segments (or multiple readers). </param>
         /// <param name="outputs"> list of directories where the output parts will be stored. </param>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Index/Sorter/EarlyTerminatingSortingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Index/Sorter/EarlyTerminatingSortingCollector.cs b/src/Lucene.Net.Misc/Index/Sorter/EarlyTerminatingSortingCollector.cs
index 2a5a6d9..0c72c35 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/EarlyTerminatingSortingCollector.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/EarlyTerminatingSortingCollector.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Index.Sorter
     /// </para>
     /// <para>
     /// <b>NOTE</b>: If you wrap a <see cref="Search.TopDocsCollector{T}"/> that sorts in the same
-    /// order as the index order, the returned <see cref="TopDocsCollector{T}.TopDocs">TopDocs</see>
+    /// order as the index order, the returned <see cref="TopDocsCollector{T}.GetTopDocs()">TopDocs</see>
     /// will be correct. However the total of <see cref="TopDocsCollector{T}.TotalHits"/>
     /// hit count will be underestimated since not all matching documents will have
     /// been collected.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
index b176574..f0be656 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
@@ -317,7 +317,7 @@ namespace Lucene.Net.Index.Sorter
         /// <para>This identifier is similar to <see cref="object.GetHashCode()"/> and should be
         /// chosen so that two instances of this class that sort documents likewise
         /// will have the same identifier. On the contrary, this identifier should be
-        /// different on different <see cref="Sort">sorts</see>.
+        /// different on different <see cref="Search.Sort">sorts</see>.
         /// </para>
         /// </summary>
         public string ID

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
index 1f94deb..6675a52 100644
--- a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
+++ b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
@@ -92,7 +92,7 @@ namespace Lucene.Net.Misc
         }
 
         /// <summary>
-        /// Returns <see cref="TermStats[]"/> ordered by the specified comparer
+        /// Returns <see cref="T:TermStats[]"/> ordered by the specified comparer
         /// </summary>
         public static TermStats[] GetHighFreqTerms(IndexReader reader, int numTerms, string field, IComparer<TermStats> comparer)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
index 45f3d92..851f7cb 100644
--- a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
+++ b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs
@@ -28,9 +28,9 @@ namespace Lucene.Net.Util.Fst
     /// more of its output values.  You can use this when a single
     /// input may need to map to more than one output,
     /// maintaining order: pass the same input with a different
-    /// output by calling <see cref="Builder.Add(IntsRef,T)"/> multiple
+    /// output by calling <see cref="Builder{T}.Add(IntsRef,T)"/> multiple
     /// times.  The builder will then combine the outputs using
-    /// the <see cref="Outputs.Merge(T,T)"/> method.
+    /// the <see cref="Outputs{T}.Merge(T,T)"/> method.
     /// 
     /// <para>The resulting FST may not be minimal when an input has
     /// more than one output, as this requires pushing all
@@ -40,7 +40,7 @@ namespace Lucene.Net.Util.Fst
     /// <para>NOTE: the only way to create multiple outputs is to
     /// add the same input to the FST multiple times in a row.  This is
     /// how the FST maps a single input to multiple outputs (e.g. you
-    /// cannot pass a List&lt;Object&gt; to <see cref="Builder.Add(IntsRef, T)"/>).  If
+    /// cannot pass a List&lt;Object&gt; to <see cref="Builder{T}.Add(IntsRef, T)"/>).  If
     /// your outputs are longs, and you need at most 2, then use
     /// <see cref="UpToTwoPositiveIntOutputs"/> instead since it stores
     /// the outputs more compactly (by stealing a bit from each

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b163f89d/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
index 8378f20..1e4cb0f 100644
--- a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
+++ b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Util.Fst
     /// <para>NOTE: the only way to create a TwoLongs output is to
     /// add the same input to the FST twice in a row.  This is
     /// how the FST maps a single input to two outputs (e.g. you
-    /// cannot pass a <see cref="TwoLongs"/> to <see cref="Builder.Add(IntsRef, T)"/>.  If you
+    /// cannot pass a <see cref="TwoLongs"/> to <see cref="Builder{T}.Add(IntsRef, T)"/>.  If you
     /// need more than two then use <see cref="ListOfOutputs{T}"/>, but if
     /// you only have at most 2 then this implementation will
     /// require fewer bytes as it steals one bit from each long


[10/27] lucenenet git commit: Lucene.Net.Queries: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Queries: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/edecf4f5
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/edecf4f5
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/edecf4f5

Branch: refs/heads/api-work
Commit: edecf4f54bf7e273b16b1d76d24d03228013e471
Parents: b163f89
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 12:52:10 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:52 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Queries/ChainedFilter.cs               |  2 ++
 src/Lucene.Net.Queries/CommonTermsQuery.cs            |  6 ++----
 .../Function/ValueSources/IfFunction.cs               |  4 ++--
 .../Function/ValueSources/RangeMapFloatFunction.cs    |  2 +-
 .../Function/ValueSources/ScaleFloatFunction.cs       |  2 +-
 src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs            | 14 +++++++-------
 6 files changed, 15 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/edecf4f5/src/Lucene.Net.Queries/ChainedFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/ChainedFilter.cs b/src/Lucene.Net.Queries/ChainedFilter.cs
index 2107d89..50896d0 100644
--- a/src/Lucene.Net.Queries/ChainedFilter.cs
+++ b/src/Lucene.Net.Queries/ChainedFilter.cs
@@ -152,6 +152,7 @@ namespace Lucene.Net.Queries
         /// </summary>
         /// <param name="context"> AtomicReaderContext </param>
         /// <param name="logic"> Logical operation </param>
+        /// <param name="index"></param>
         /// <returns> DocIdSet </returns>
         private DocIdSet GetDocIdSet(AtomicReaderContext context, int logic, int[] index)
         {
@@ -169,6 +170,7 @@ namespace Lucene.Net.Queries
         /// </summary>
         /// <param name="context"> AtomicReaderContext </param>
         /// <param name="logic"> Logical operation </param>
+        /// <param name="index"></param>
         /// <returns> DocIdSet </returns>
         private DocIdSet GetDocIdSet(AtomicReaderContext context, int[] logic, int[] index)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/edecf4f5/src/Lucene.Net.Queries/CommonTermsQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/CommonTermsQuery.cs b/src/Lucene.Net.Queries/CommonTermsQuery.cs
index efab160..de2382b 100644
--- a/src/Lucene.Net.Queries/CommonTermsQuery.cs
+++ b/src/Lucene.Net.Queries/CommonTermsQuery.cs
@@ -31,12 +31,12 @@ namespace Lucene.Net.Queries
     /// <summary>
     /// A query that executes high-frequency terms in a optional sub-query to prevent
     /// slow queries due to "common" terms like stopwords. This query
-    /// builds 2 queries off the <see cref="Add(Term) added"/> terms: low-frequency
+    /// builds 2 queries off the <see cref="Add(Term)"/> added terms: low-frequency
     /// terms are added to a required boolean clause and high-frequency terms are
     /// added to an optional boolean clause. The optional clause is only executed if
     /// the required "low-frequency" clause matches. Scores produced by this query
     /// will be slightly different than plain <see cref="BooleanQuery"/> scorer mainly due to
-    /// differences in the <see cref="Search.Similarities.Similarity.Coord(int,int) number of leaf queries"/>
+    /// differences in the <see cref="Search.Similarities.Similarity.Coord(int,int)"/> number of leaf queries
     /// in the required boolean clause. In most cases, high-frequency terms are
     /// unlikely to significantly contribute to the document score unless at least
     /// one of the low-frequency terms are matched.  This query can improve
@@ -336,8 +336,6 @@ namespace Lucene.Net.Queries
         /// clauses is required.
         /// </para>
         /// </summary>
-        /// <param name="min">
-        ///          the number of optional clauses that must match </param>
         public virtual float HighFreqMinimumNumberShouldMatch
         {
             get { return m_highFreqMinNrShouldMatch; }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/edecf4f5/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
index 621449f..605589e 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
@@ -23,8 +23,8 @@ namespace Lucene.Net.Queries.Function.ValueSources
      */
 
     /// <summary>
-    /// Depending on the <see cref="bool"/> value of the <paramref name="ifSource"/> function,
-    /// returns the value of the <paramref name="trueSource"/> or <paramref name="falseSource"/> function.
+    /// Depending on the <see cref="bool"/> value of the <see cref="ifSource"/> function,
+    /// returns the value of the <see cref="trueSource"/> or <see cref="falseSource"/> function.
     /// </summary>
     public class IfFunction : BoolFunction
     {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/edecf4f5/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
index 96eec41..d510257 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
 
     /// <summary>
     /// <see cref="RangeMapFloatFunction"/> implements a map function over
-    /// another <see cref="ValueSource"/> whose values fall within <paramref name="min"/> and <paramref name="max"/> inclusive to <paramref name="target"/>.
+    /// another <see cref="ValueSource"/> whose values fall within <c>min</c> and <c>max</c> inclusive to <c>target</c>.
     /// <para/>
     /// Normally used as an argument to a <see cref="FunctionQuery"/>
     /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/edecf4f5/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
index 9acf763..a321258 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
      */
 
     /// <summary>
-    /// Scales values to be between <paramref name="min"/> and <paramref name="max"/>.
+    /// Scales values to be between <c>min</c> and <c>max</c>.
     /// <para/>This implementation currently traverses all of the source values to obtain
     /// their min and max.
     /// <para/>This implementation currently cannot distinguish when documents have been

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/edecf4f5/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs b/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
index a4dda6a..f3bfb1c 100644
--- a/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
+++ b/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs
@@ -373,7 +373,7 @@ namespace Lucene.Net.Queries.Mlt
         }
 
         /// <summary>
-        /// Create the More like query from a <see cref="Util.PriorityQueue{Object[]}"/>
+        /// Create the More like query from a <see cref="T:Util.PriorityQueue{object[]}"/>
         /// </summary>
         private Query CreateQuery(Util.PriorityQueue<object[]> q)
         {
@@ -418,7 +418,7 @@ namespace Lucene.Net.Queries.Mlt
         }
 
         /// <summary>
-        /// Create a <see cref="Util.PriorityQueue{Object[]}"/> from a word->tf map.
+        /// Create a <see cref="T:Util.PriorityQueue{object[]}"/> from a word-&gt;tf map.
         /// </summary>
         /// <param name="words"> a map of words keyed on the word(<see cref="string"/>) with <see cref="Int"/> objects as the values. </param>
         /// <exception cref="IOException"/>
@@ -538,9 +538,9 @@ namespace Lucene.Net.Queries.Mlt
         }
 
         /// <summary>
-        /// Adds terms and frequencies found in vector into the <see cref="IDictionary{string, Int}"/> <paramref name="termFreqMap"/>
+        /// Adds terms and frequencies found in vector into the <see cref="T:IDictionary{string, Int}"/> <paramref name="termFreqMap"/>
         /// </summary>
-        /// <param name="termFreqMap"> a <see cref="IDictionary{string, Int}"/> of terms and their frequencies </param>
+        /// <param name="termFreqMap"> a <see cref="T:IDictionary{string, Int}"/> of terms and their frequencies </param>
         /// <param name="vector"> List of terms and their frequencies for a doc/field </param>
         private void AddTermFrequencies(IDictionary<string, Int> termFreqMap, Terms vector)
         {
@@ -573,10 +573,10 @@ namespace Lucene.Net.Queries.Mlt
         }
 
         /// <summary>
-        /// Adds term frequencies found by tokenizing text from reader into the <see cref="IDictionary{string, Int}"/> words
+        /// Adds term frequencies found by tokenizing text from reader into the <see cref="T:IDictionary{string, Int}"/> words
         /// </summary>
         /// <param name="r"> a source of text to be tokenized </param>
-        /// <param name="termFreqMap"> a <see cref="IDictionary{string, Int}"/> of terms and their frequencies </param>
+        /// <param name="termFreqMap"> a <see cref="T:IDictionary{string, Int}"/> of terms and their frequencies </param>
         /// <param name="fieldName"> Used by analyzer for any special per-field analysis </param>
         private void AddTermFrequencies(TextReader r, IDictionary<string, Int> termFreqMap, string fieldName)
         {
@@ -714,7 +714,7 @@ namespace Lucene.Net.Queries.Mlt
         }
 
         /// <summary>
-        /// <see cref="Util.PriorityQueue{object[]}"/> that orders words by score.
+        /// <see cref="T:Util.PriorityQueue{object[]}"/> that orders words by score.
         /// </summary>
         private class FreqQ : Util.PriorityQueue<object[]>
         {


[12/27] lucenenet git commit: Lucene.Net.QueryParser: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.QueryParser: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e32cb9e8
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e32cb9e8
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e32cb9e8

Branch: refs/heads/api-work
Commit: e32cb9e8a61a8330a293039d520b822eff755cc8
Parents: edecf4f
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 13:17:49 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:53 2017 +0700

----------------------------------------------------------------------
 .../Analyzing/AnalyzingQueryParser.cs           |  2 +-
 .../Classic/QueryParser.cs                      |  4 ++--
 .../Classic/QueryParserBase.cs                  | 18 ++++++++-------
 src/Lucene.Net.QueryParser/Ext/Extensions.cs    |  6 ++---
 .../RemoveDeletedQueryNodesProcessor.cs         |  2 +-
 .../Flexible/Core/QueryParserHelper.cs          | 24 ++++++++++++--------
 .../Config/FieldDateResolutionFCListener.cs     |  4 ++--
 .../Standard/Config/NumberDateFormat.cs         |  2 +-
 .../Flexible/Standard/Nodes/NumericQueryNode.cs |  4 ++--
 .../Processors/AllowLeadingWildcardProcessor.cs |  2 +-
 .../Processors/NumericQueryNodeProcessor.cs     |  2 +-
 .../StandardQueryNodeProcessorPipeline.cs       |  2 +-
 .../Processors/TermRangeQueryNodeProcessor.cs   |  2 +-
 .../Flexible/Standard/StandardQueryParser.cs    |  2 +-
 .../Simple/SimpleQueryParser.cs                 |  2 +-
 .../Surround/Parser/QueryParserTokenManager.cs  | 10 ++++----
 .../Xml/CorePlusExtensionsParser.cs             |  2 +-
 17 files changed, 49 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
index e95979f..df8c4f4 100644
--- a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.QueryParsers.Analyzing
     /// <para/>
     /// <b>Warning:</b> This class should only be used with analyzers that do not use stopwords
     /// or that add tokens. Also, several stemming analyzers are inappropriate: for example, <see cref="Analysis.De.GermanAnalyzer"/>  
-    /// will turn <c>H&auml;user</c> into <c>hau</c>, but <c>H?user</c> will 
+    /// will turn <c>Häuser</c> into <c>hau</c>, but <c>H?user</c> will 
     /// become <c>h?user</c> when using this parser and thus no match would be found (i.e.
     /// using this parser will be no improvement over QueryParser in such cases). 
     /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
index 179258c..daf1b72 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -63,7 +63,7 @@ namespace Lucene.Net.QueryParsers.Classic
     /// In <see cref="TermRangeQuery" />s, QueryParser tries to detect date values, e.g.
     /// <tt>date:[6/1/2005 TO 6/4/2005]</tt> produces a range query that searches
     /// for "date" fields between 2005-06-01 and 2005-06-04. Note that the format
-    /// of the accepted input depends on the <see cref="Locale" />.
+    /// of the accepted input depends on the <see cref="System.Globalization.CultureInfo" />.
     /// A <see cref="Documents.DateTools.Resolution" /> has to be set,
     /// if you want to use <see cref="Documents.DateTools"/> for date conversion.<p/>
     /// </para>
@@ -99,7 +99,7 @@ namespace Lucene.Net.QueryParsers.Classic
     {
         // NOTE: This was moved into the QueryParserBase class.
 
-        ///* The default operator_Renamed for parsing queries. 
+        // * The default operator_Renamed for parsing queries. 
         // * Use {@link QueryParser#setDefaultOperator} to change it.
         // */
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index 2a70cf4..f2a9798 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -111,7 +111,7 @@ namespace Lucene.Net.QueryParsers.Classic
         /// Whether or not to analyze range terms when constructing RangeQuerys
         /// (For example, analyzing terms into collation keys for locale-sensitive RangeQuery)
         /// </summary>
-        //bool analyzeRangeTerms = false;
+        bool analyzeRangeTerms = false;
 
         /// <summary>
         /// So the generated QueryParser(CharStream) won't error out
@@ -132,8 +132,6 @@ namespace Lucene.Net.QueryParsers.Classic
             FuzzyPrefixLength = FuzzyQuery.DefaultPrefixLength;
             Locale = CultureInfo.CurrentCulture;
             TimeZone = TimeZoneInfo.Local;
-            
-            AnalyzeRangeTerms = false;
         }
 
         /// <summary>
@@ -204,7 +202,7 @@ namespace Lucene.Net.QueryParsers.Classic
         /// when the analyzer returns more than one term from whitespace
         /// delimited text.
         /// NOTE: this behavior may not be suitable for all languages.
-        /// <p>
+        /// <para/>
         /// Set to false if phrase queries should only be generated when
         /// surrounded by double quotes.
         /// </summary>
@@ -254,7 +252,7 @@ namespace Lucene.Net.QueryParsers.Classic
 
         /// <summary>
         /// Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
-        //  lower-cased or not.  Default is <c>true</c>.
+        /// lower-cased or not.  Default is <c>true</c>.
         /// </summary>
         public virtual bool LowercaseExpandedTerms { get; set; }
 
@@ -341,7 +339,11 @@ namespace Lucene.Net.QueryParsers.Classic
         /// For example, setting this to true can enable analyzing terms into 
         /// collation keys for locale-sensitive <see cref="TermRangeQuery"/>.
         /// </summary>
-        public virtual bool AnalyzeRangeTerms { get; set; }
+        public virtual bool AnalyzeRangeTerms
+        {
+            get { return analyzeRangeTerms; }
+            set { analyzeRangeTerms = value; }
+        }
 
         protected internal virtual void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
         {
@@ -605,7 +607,7 @@ namespace Lucene.Net.QueryParsers.Classic
             }
             else
             {
-                start = AnalyzeRangeTerms ? AnalyzeMultitermTerm(field, part1) : new BytesRef(part1);
+                start = analyzeRangeTerms ? AnalyzeMultitermTerm(field, part1) : new BytesRef(part1);
             }
 
             if (part2 == null)
@@ -614,7 +616,7 @@ namespace Lucene.Net.QueryParsers.Classic
             }
             else
             {
-                end = AnalyzeRangeTerms ? AnalyzeMultitermTerm(field, part2) : new BytesRef(part2);
+                end = analyzeRangeTerms ? AnalyzeMultitermTerm(field, part2) : new BytesRef(part2);
             }
 
             TermRangeQuery query = new TermRangeQuery(field, start, end, startInclusive, endInclusive);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Ext/Extensions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/Extensions.cs b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
index b231166..195940f 100644
--- a/src/Lucene.Net.QueryParser/Ext/Extensions.cs
+++ b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
@@ -100,7 +100,7 @@ namespace Lucene.Net.QueryParsers.Ext
 
         /// <summary>
         /// Splits a extension field and returns the field / extension part as a
-        /// <see cref="Tuple{string,string}"/>. This method tries to split on the first occurrence of the
+        /// <see cref="T:Tuple{string,string}"/>. This method tries to split on the first occurrence of the
         /// extension field delimiter, if the delimiter is not present in the string
         /// the result will contain a <code>null</code> value for the extension key and
         /// the given field string as the field value. If the given extension field
@@ -109,8 +109,8 @@ namespace Lucene.Net.QueryParsers.Ext
         /// </summary>
         /// <param name="defaultField">the default query field</param>
         /// <param name="field">the extension field string</param>
-        /// <returns>a {<see cref="Tuple{string,string}"/> with the field name as the <see cref="Tuple{string,string}.Item1"/> and the
-        /// extension key as the <see cref="Tuple{string,string}.Item2"/></returns>
+        /// <returns>a {<see cref="Tuple{T1, T2}"/> with the field name as the <see cref="Tuple{T1, T2}.Item1"/> and the
+        /// extension key as the <see cref="Tuple{T1, T2}.Item2"/></returns>
         public virtual Tuple<string, string> SplitExtensionField(string defaultField, string field)
         {
             int indexOf = field.IndexOf(this.extensionFieldDelimiter);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs
index a570d30..0a54974 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs
@@ -22,7 +22,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Processors
 
     /// <summary>
     /// A <see cref="QueryNodeProcessorPipeline"/> class removes every instance of
-    /// <see cref=""/> from a query node tree. If the resulting root node
+    /// <see cref="DeletedQueryNode"/> from a query node tree. If the resulting root node
     /// is a <see cref="DeletedQueryNode"/>, <see cref="MatchNoDocsQueryNode"/> is returned.
     /// </summary>
     public class RemoveDeletedQueryNodesProcessor : QueryNodeProcessor

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs b/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs
index d2ba77e..8bc7be4 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs
@@ -194,15 +194,21 @@ namespace Lucene.Net.QueryParsers.Flexible.Core
         /// <para/>
         /// In this method the three phases are executed:
         /// <para/>
-        /// &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;1st - the query string is parsed using the
-        /// text parser returned by <see cref="SyntaxParser"/>, the result is a query
-        /// node tree
-        /// <para/>
-        /// &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;2nd - the query node tree is processed by the
-        /// processor returned by <see cref="QueryNodeProcessor"/>
-        /// <para/>
-        /// &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;3th - a object is built from the query node
-        /// tree using the builder returned by <see cref="QueryBuilder"/>
+        /// <list type="number">
+        ///     <item>
+        ///     the query string is parsed using the
+        ///     text parser returned by <see cref="SyntaxParser"/>, the result is a query
+        ///     node tree.
+        ///     </item>
+        ///     <item>
+        ///     the query node tree is processed by the
+        ///     processor returned by <see cref="QueryNodeProcessor"/>.
+        ///     </item>
+        ///     <item>
+        ///     a object is built from the query node
+        ///     tree using the builder returned by <see cref="QueryBuilder"/>.
+        ///     </item>
+        /// </list>
         /// </summary>
         /// <param name="query">the query string</param>
         /// <param name="defaultField">the default field used by the text parser</param>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
index 121b4f1..e4b0136 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
@@ -27,8 +27,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
     /// on a defined map: fieldName -> <see cref="DateTools.Resolution"/> stored in
     /// <see cref="ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP"/>.
     /// </summary>
-    /// <seealso cref="ConfigurationKeys#DATE_RESOLUTION"/>
-    /// <seealso cref="ConfigurationKeys#FIELD_DATE_RESOLUTION_MAP"/>
+    /// <seealso cref="ConfigurationKeys.DATE_RESOLUTION"/>
+    /// <seealso cref="ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP"/>
     /// <seealso cref="FieldConfig"/>
     /// <seealso cref="IFieldConfigListener"/>
     public class FieldDateResolutionFCListener : IFieldConfigListener

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
index ceb04db..940950d 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumberDateFormat.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
 
     /// <summary>
     /// This <see cref="NumberFormat"/> parses <see cref="long"/> into date strings and vice-versa. It
-    /// uses the given <paramref name="dateFormat"/> and <see cref="CultureInfo">locale</see> to parse and format dates, but before, it
+    /// uses the given <c>dateFormat</c> and <see cref="CultureInfo">locale</see> to parse and format dates, but before, it
     /// converts <see cref="long"/> to <see cref="DateTime"/> objects or vice-versa.
     /// <para/>
     /// Note that the <see cref="long"/> value the dates are parsed into and out of represent the number of milliseconds

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericQueryNode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericQueryNode.cs
index 44afee4..50e988f 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericQueryNode.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
     /// similar to <see cref="FieldQueryNode"/>, however the <see cref="Value"/> returns an
     /// <see cref="object"/> representing a .NET numeric type.
     /// </summary>
-    /// <seealso cref="NumericConfig"/>
+    /// <seealso cref="Standard.Config.NumericConfig"/>
     public class NumericQueryNode : QueryNode, IFieldValuePairQueryNode<object> // LUCENENET TODO: Can we use Decimal??
     {
         private NumberFormat numberFormat;
@@ -65,7 +65,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
 
         /// <summary>
         /// This method is used to get the value converted to <see cref="string"/> and
-        /// escaped using the given <see cref="IEscapeQuerySyntax"/.
+        /// escaped using the given <see cref="IEscapeQuerySyntax"/>.
         /// </summary>
         /// <param name="escaper">The <see cref="IEscapeQuerySyntax"/> used to escape the value <see cref="string"/></param>
         /// <returns>The value converted to <see cref="string"/> and escaped</returns>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs
index 1a6eeb6..9ad864e 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
     /// <summary>
     /// This processor verifies if
     /// <see cref="ConfigurationKeys.ALLOW_LEADING_WILDCARD"/> is defined in the
-    /// <see cref="QueryConfigHandler"/>. If it is and leading wildcard is not allowed, it
+    /// <see cref="Core.Config.QueryConfigHandler"/>. If it is and leading wildcard is not allowed, it
     /// looks for every <see cref="WildcardQueryNode"/> contained in the query node tree
     /// and throws an exception if any of them has a leading wildcard ('*' or '?').
     /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs
index 24c73da..4ab43a3 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
     /// TO 1]</b>.
     /// <para/>
     /// Note that <see cref="FieldQueryNode"/>s children of a
-    /// <see cref="RangeQueryNode"/> are ignored.
+    /// <see cref="IRangeQueryNode"/> are ignored.
     /// </summary>
     /// <seealso cref="ConfigurationKeys.NUMERIC_CONFIG"/>
     /// <seealso cref="FieldQueryNode"/>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/StandardQueryNodeProcessorPipeline.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/StandardQueryNodeProcessorPipeline.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/StandardQueryNodeProcessorPipeline.cs
index 64c7a00..eccd6f2 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/StandardQueryNodeProcessorPipeline.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/StandardQueryNodeProcessorPipeline.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
     /// This processor pipeline was designed to work with
     /// <see cref="Config.StandardQueryConfigHandler"/>.
     /// <para/>
-    /// The result query node tree can be used to build a <see cref="Query"/> object using
+    /// The result query node tree can be used to build a <see cref="Search.Query"/> object using
     /// <see cref="Builders.StandardQueryTreeBuilder"/>.
     /// </summary>
     /// <seealso cref="Builders.StandardQueryTreeBuilder"/>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
index 80639d7..d5eef1a 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
     /// <summary>
     /// This processors process <see cref="TermRangeQueryNode"/>s. It reads the lower and
     /// upper bounds value from the <see cref="TermRangeQueryNode"/> object and try
-    /// to parse their values using a <paramref name="dateFormat"/>. If the values cannot be
+    /// to parse their values using a <c>dateFormat</c>. If the values cannot be
     /// parsed to a date value, it will only create the <see cref="TermRangeQueryNode"/>
     /// using the non-parsed values.
     /// <para/>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
index 479ddb3..c9d0da6 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
@@ -453,7 +453,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
         }
 
         /// <summary>
-        /// Gets or Sets the field to <see cref="DateTools.Resolution?"/> map used to normalize each date field.
+        /// Gets or Sets the field to <see cref="T:DateTools.Resolution?"/> map used to normalize each date field.
         /// </summary>
         public virtual IDictionary<string, DateTools.Resolution?> DateResolutionMap
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
index ff0b637..fab1dce 100644
--- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
@@ -104,7 +104,7 @@ namespace Lucene.Net.QueryParsers.Simple
         [Flags]
         public enum Operator : int
         {
-            /// <summary>Enables <c><AND/c> operator (+)</summary>
+            /// <summary>Enables <c>AND</c> operator (+)</summary>
             AND_OPERATOR = 1 << 0,
             /// <summary>Enables <c>NOT</c> operator (-)</summary>
             NOT_OPERATOR = 1 << 1,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
index f477104..aaa67f1 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -533,11 +533,11 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
             }
         }
 
-        /** Token literal values. */
-        //public static readonly string[] jjstrLiteralImages = {
-        //    "", null, null, null, null, null, null, null, null, null, null, null, null, 
-        //    "\50", "\51", "\54", "\72", "\136", null, null, null, null, null, null 
-        //};
+        ////** Token literal values. */
+        ////public static readonly string[] jjstrLiteralImages = {
+        ////    "", null, null, null, null, null, null, null, null, null, null, null, null, 
+        ////    "\50", "\51", "\54", "\72", "\136", null, null, null, null, null, null 
+        ////};
 
         /// <summary>Token literal values.</summary>
         public static readonly string[] jjstrLiteralImages = {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e32cb9e8/src/Lucene.Net.QueryParser/Xml/CorePlusExtensionsParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Xml/CorePlusExtensionsParser.cs b/src/Lucene.Net.QueryParser/Xml/CorePlusExtensionsParser.cs
index 231d52e..7941978 100644
--- a/src/Lucene.Net.QueryParser/Xml/CorePlusExtensionsParser.cs
+++ b/src/Lucene.Net.QueryParser/Xml/CorePlusExtensionsParser.cs
@@ -22,7 +22,7 @@ namespace Lucene.Net.QueryParsers.Xml
      */
 
     /// <summary>
-    /// Assembles a <see cref="QueryBuilder"/> which uses <see cref="Query"/> objects from
+    /// Assembles a <see cref="Util.QueryBuilder"/> which uses <see cref="Search.Query"/> objects from
     /// Lucene's <c>sandbox</c> and <c>queries</c>
     /// modules in addition to core queries.
     /// </summary>


[03/27] lucenenet git commit: Lucene.Net.Analaysis.Stempel: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Analaysis.Stempel: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/2e2fedaa
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/2e2fedaa
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/2e2fedaa

Branch: refs/heads/api-work
Commit: 2e2fedaab71775351bd0e5cd53e84d9338381cab
Parents: 2ae5a27
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 11:17:09 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:46 2017 +0700

----------------------------------------------------------------------
 .../Egothor.Stemmer/Compile.cs                   | 19 +++++++++----------
 .../Egothor.Stemmer/MultiTrie2.cs                |  4 ++--
 .../Egothor.Stemmer/Row.cs                       |  2 +-
 3 files changed, 12 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e2fedaa/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
index 20cb46a..b1d2fe9 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs
@@ -73,16 +73,15 @@ namespace Egothor.Stemmer
         /// </summary>
         private Compile() { }
 
-        /**
-         * Entry point to the Compile application.
-         * <p>
-         * This program takes any number of arguments: the first is the name of the
-         * desired stemming algorithm to use (a list is available in the package
-         * description) , all of the rest should be the path or paths to a file or
-         * files containing a stemmer table to compile.
-         * 
-         * @param args the command line arguments
-         */
+        /// <summary>
+        /// Entry point to the Compile application.
+        /// <para/>
+        /// This program takes any number of arguments: the first is the name of the
+        /// desired stemming algorithm to use (a list is available in the package
+        /// description) , all of the rest should be the path or paths to a file or
+        /// files containing a stemmer table to compile.
+        /// </summary>
+        /// <param name="args">the command line arguments</param>
         public static void Main(string[] args)
         {
             if (args.Length < 1)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e2fedaa/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
index cc6dbed..8d8568e 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
@@ -74,7 +74,7 @@ namespace Egothor.Stemmer
         /// Constructor for the <see cref="MultiTrie"/> object.
         /// </summary>
         /// <param name="is">the input stream</param>
-        /// <exception cref="IOException">if an I/O error occurs</exception>
+        /// <exception cref="System.IO.IOException">if an I/O error occurs</exception>
         public MultiTrie2(IDataInput @is)
             : base(@is)
         {
@@ -207,7 +207,7 @@ namespace Egothor.Stemmer
         /// Write this data structure to the given output stream.
         /// </summary>
         /// <param name="os">the output stream</param>
-        /// <exception cref="IOException">if an I/O error occurs</exception>
+        /// <exception cref="System.IO.IOException">if an I/O error occurs</exception>
         public override void Store(IDataOutput os)
         {
             base.Store(os);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e2fedaa/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
index 6fdad76..b2ee5fa 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Row.cs
@@ -73,7 +73,7 @@ namespace Egothor.Stemmer
         /// <summary>
         /// Construct a <see cref="Row"/> object from input carried in via the given input stream.
         /// </summary>
-        /// <param name="@is">the input stream</param>
+        /// <param name="is">the input stream</param>
         /// <exception cref="IOException">if an I/O error occurs</exception>
         public Row(IDataInput @is)
         {


[26/27] lucenenet git commit: Lucene.Net.Core.Util.RamUsageEstimator: added SizeOf() overload for byte[] and marked the sbyte[] overload CLSCompliant(false)

Posted by ni...@apache.org.
Lucene.Net.Core.Util.RamUsageEstimator: added SizeOf() overload for byte[] and marked the sbyte[] overload CLSCompliant(false)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/7cb60642
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/7cb60642
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/7cb60642

Branch: refs/heads/api-work
Commit: 7cb60642bc80aacc605e8257237a93b9bb4a8c5e
Parents: 0ef5798
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 19:23:51 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 19:23:51 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/RamUsageEstimator.cs | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7cb60642/src/Lucene.Net.Core/Util/RamUsageEstimator.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/RamUsageEstimator.cs b/src/Lucene.Net.Core/Util/RamUsageEstimator.cs
index 0faeba0..4171d22 100644
--- a/src/Lucene.Net.Core/Util/RamUsageEstimator.cs
+++ b/src/Lucene.Net.Core/Util/RamUsageEstimator.cs
@@ -272,7 +272,16 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// Returns the size in bytes of the byte[] object. </summary>
-        public static long SizeOf(sbyte[] arr) // LUCENENET TODO: can we change to byte ? it doesn't use the values in the array anyway
+        // LUCENENET specific overload for CLS compliance
+        public static long SizeOf(byte[] arr)
+        {
+            return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length);
+        }
+
+        /// <summary>
+        /// Returns the size in bytes of the sbyte[] object. </summary>
+        [CLSCompliant(false)]
+        public static long SizeOf(sbyte[] arr)
         {
             return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length);
         }


[25/27] lucenenet git commit: Lucene.Net.Core.Util.IndexableBinaryStringTools: added CLS compliant overloads of GetEncodedLength(), Encode() and Decode() and marked existing overloads CLSCompliant(false)

Posted by ni...@apache.org.
Lucene.Net.Core.Util.IndexableBinaryStringTools: added CLS compliant overloads of GetEncodedLength(), Encode() and Decode() and marked existing overloads CLSCompliant(false)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/0ef5798b
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/0ef5798b
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/0ef5798b

Branch: refs/heads/api-work
Commit: 0ef5798b93bb0900c397d79d2485e70e981d7775
Parents: 29e6531
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 19:15:50 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 19:15:50 2017 +0700

----------------------------------------------------------------------
 .../Util/IndexableBinaryStringTools.cs          | 60 ++++++++++++++++++--
 1 file changed, 56 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0ef5798b/src/Lucene.Net.Core/Util/IndexableBinaryStringTools.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/IndexableBinaryStringTools.cs b/src/Lucene.Net.Core/Util/IndexableBinaryStringTools.cs
index a1f75bc..aad4d03 100644
--- a/src/Lucene.Net.Core/Util/IndexableBinaryStringTools.cs
+++ b/src/Lucene.Net.Core/Util/IndexableBinaryStringTools.cs
@@ -69,7 +69,22 @@ namespace Lucene.Net.Util
         /// <param name="inputOffset"> initial offset into inputArray </param>
         /// <param name="inputLength"> number of bytes in inputArray </param>
         /// <returns> The number of chars required to encode the number of bytes. </returns>
-        public static int GetEncodedLength(sbyte[] inputArray, int inputOffset, int inputLength) // LUCENENET TODO: CLS compliance
+        // LUCENENET specific overload for CLS compliance
+        public static int GetEncodedLength(byte[] inputArray, int inputOffset, int inputLength)
+        {
+            // Use long for intermediaries to protect against overflow
+            return (int)((8L * inputLength + 14L) / 15L) + 1;
+        }
+
+        /// <summary>
+        /// Returns the number of chars required to encode the given sbytes.
+        /// </summary>
+        /// <param name="inputArray"> sbyte sequence to be encoded </param>
+        /// <param name="inputOffset"> initial offset into inputArray </param>
+        /// <param name="inputLength"> number of sbytes in inputArray </param>
+        /// <returns> The number of chars required to encode the number of sbytes. </returns>
+        [CLSCompliant(false)]
+        public static int GetEncodedLength(sbyte[] inputArray, int inputOffset, int inputLength)
         {
             // Use long for intermediaries to protect against overflow
             return (int)((8L * inputLength + 14L) / 15L) + 1;
@@ -99,17 +114,35 @@ namespace Lucene.Net.Util
         }
 
         /// <summary>
-        /// Encodes the input byte sequence into the output char sequence.  Before
+        /// Encodes the input sbyte sequence into the output char sequence.  Before
         /// calling this method, ensure that the output array has sufficient
         /// capacity by calling <seealso cref="#getEncodedLength(byte[], int, int)"/>.
         /// </summary>
-        /// <param name="inputArray"> byte sequence to be encoded </param>
+        /// <param name="inputArray"> sbyte sequence to be encoded </param>
         /// <param name="inputOffset"> initial offset into inputArray </param>
         /// <param name="inputLength"> number of bytes in inputArray </param>
         /// <param name="outputArray"> char sequence to store encoded result </param>
         /// <param name="outputOffset"> initial offset into outputArray </param>
         /// <param name="outputLength"> length of output, must be getEncodedLength </param>
-        public static void Encode(sbyte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength) // LUCENENET TODO: CLS compliance
+        // LUCENENET specific overload for CLS compliance
+        public static void Encode(byte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength)
+        {
+            Encode((sbyte[])(Array)inputArray, inputOffset, inputLength, outputArray, outputOffset, outputLength);
+        }
+
+        /// <summary>
+        /// Encodes the input sbyte sequence into the output char sequence.  Before
+        /// calling this method, ensure that the output array has sufficient
+        /// capacity by calling <seealso cref="#getEncodedLength(byte[], int, int)"/>.
+        /// </summary>
+        /// <param name="inputArray"> sbyte sequence to be encoded </param>
+        /// <param name="inputOffset"> initial offset into inputArray </param>
+        /// <param name="inputLength"> number of bytes in inputArray </param>
+        /// <param name="outputArray"> char sequence to store encoded result </param>
+        /// <param name="outputOffset"> initial offset into outputArray </param>
+        /// <param name="outputLength"> length of output, must be getEncodedLength </param>
+        [CLSCompliant(false)]
+        public static void Encode(sbyte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength)
         {
             Debug.Assert(outputLength == GetEncodedLength(inputArray, inputOffset, inputLength));
             if (inputLength > 0)
@@ -170,6 +203,25 @@ namespace Lucene.Net.Util
         /// <param name="outputOffset"> initial offset into outputArray </param>
         /// <param name="outputLength"> length of output, must be
         ///        getDecodedLength(inputArray, inputOffset, inputLength) </param>
+        // LUCENENET specific overload for CLS compliance
+        public static void Decode(char[] inputArray, int inputOffset, int inputLength, byte[] outputArray, int outputOffset, int outputLength)
+        {
+            Decode(inputArray, inputOffset, inputLength, (sbyte[])(Array)outputArray, outputOffset, outputLength);
+        }
+
+        /// <summary>
+        /// Decodes the input char sequence into the output sbyte sequence. Before
+        /// calling this method, ensure that the output array has sufficient capacity
+        /// by calling <seealso cref="#getDecodedLength(char[], int, int)"/>.
+        /// </summary>
+        /// <param name="inputArray"> char sequence to be decoded </param>
+        /// <param name="inputOffset"> initial offset into inputArray </param>
+        /// <param name="inputLength"> number of chars in inputArray </param>
+        /// <param name="outputArray"> byte sequence to store encoded result </param>
+        /// <param name="outputOffset"> initial offset into outputArray </param>
+        /// <param name="outputLength"> length of output, must be
+        ///        getDecodedLength(inputArray, inputOffset, inputLength) </param>
+        [CLSCompliant(false)]
         public static void Decode(char[] inputArray, int inputOffset, int inputLength, sbyte[] outputArray, int outputOffset, int outputLength)
         {
             Debug.Assert(outputLength == GetDecodedLength(inputArray, inputOffset, inputLength));


[07/27] lucenenet git commit: Lucene.Net.Memory: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Memory: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/66d768ae
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/66d768ae
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/66d768ae

Branch: refs/heads/api-work
Commit: 66d768ae9d114ffa27a1823e30bc47c1a9237bcd
Parents: bbac431
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 12:34:30 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:50 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Memory/MemoryIndex.cs | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66d768ae/src/Lucene.Net.Memory/MemoryIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Memory/MemoryIndex.cs b/src/Lucene.Net.Memory/MemoryIndex.cs
index 45635da..cd4a284 100644
--- a/src/Lucene.Net.Memory/MemoryIndex.cs
+++ b/src/Lucene.Net.Memory/MemoryIndex.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Index.Memory
     /// <h4>Overview</h4>
     /// 
     /// This class is a replacement/substitute for a large subset of
-    /// <see cref="RAMDirectory"/> functionality. It is designed to
+    /// <see cref="Store.RAMDirectory"/> functionality. It is designed to
     /// enable maximum efficiency for on-the-fly matchmaking combining structured and 
     /// fuzzy fulltext search in realtime streaming applications such as Nux XQuery based XML 
     /// message queues, publish-subscribe systems for Blogs/newsfeeds, text chat, data acquisition and 
@@ -73,7 +73,7 @@ namespace Lucene.Net.Index.Memory
     /// <a target="_blank" 
     /// href="http://bobwyman.pubsub.com/main/2005/05/mary_hodder_poi.html">Prospective Search</a>, 
     /// Jim Gray's
-    /// <a target="_blank" href="http://www.acmqueue.org/modules.php?name=Content&pa=showpage&pid=293&page=4">
+    /// <a target="_blank" href="http://www.acmqueue.org/modules.php?name=Content&amp;pa=showpage&amp;pid=293&amp;page=4">
     /// A Call to Arms - Custom subscriptions</a>, and Tim Bray's
     /// <a target="_blank" 
     /// href="http://www.tbray.org/ongoing/When/200x/2003/07/30/OnSearchTOC">On Search, the Series</a>.
@@ -337,12 +337,12 @@ namespace Lucene.Net.Index.Memory
         /// Equivalent to adding a tokenized, indexed, termVectorStored, unstored,
         /// Lucene <see cref="Documents.Field"/>.
         /// Finally closes the token stream. Note that untokenized keywords can be added with this method via 
-        /// <see cref="KeywordTokenStream{T}(ICollection{T}"/>)"/>, the Lucene <c>KeywordTokenizer</c> or similar utilities.
+        /// <see cref="T:KeywordTokenStream{T}(ICollection{T}"/>)"/>, the Lucene <c>KeywordTokenizer</c> or similar utilities.
         /// </summary>
         /// <param name="fieldName"> a name to be associated with the text </param>
         /// <param name="stream"> the token stream to retrieve tokens from. </param>
         /// <param name="boost"> the boost factor for hits for this field </param>
-        /// <seealso cref="Documents.Field.Boost(float)"/>
+        /// <seealso cref="Documents.Field.Boost"/>
         public virtual void AddField(string fieldName, TokenStream stream, float boost)
         {
             AddField(fieldName, stream, boost, 0);
@@ -354,7 +354,7 @@ namespace Lucene.Net.Index.Memory
         /// Equivalent to adding a tokenized, indexed, termVectorStored, unstored,
         /// Lucene <see cref="Documents.Field"/>.
         /// Finally closes the token stream. Note that untokenized keywords can be added with this method via
-        /// <see cref="KeywordTokenStream{T}(ICollection{T}"/>)"/>, the Lucene <c>KeywordTokenizer</c> or similar utilities.
+        /// <see cref="T:KeywordTokenStream{T}(ICollection{T}"/>)"/>, the Lucene <c>KeywordTokenizer</c> or similar utilities.
         /// </summary>
         /// <param name="fieldName"> a name to be associated with the text </param>
         /// <param name="stream"> the token stream to retrieve tokens from. </param>
@@ -362,7 +362,7 @@ namespace Lucene.Net.Index.Memory
         /// <param name="positionIncrementGap"> 
         /// the position increment gap if fields with the same name are added more than once
         /// </param>
-        /// <seealso cref="Documents.Field.Boost(float)"/>
+        /// <seealso cref="Documents.Field.Boost"/>
         public virtual void AddField(string fieldName, TokenStream stream, float boost, int positionIncrementGap)
         {
             AddField(fieldName, stream, boost, positionIncrementGap, 1);
@@ -373,7 +373,7 @@ namespace Lucene.Net.Index.Memory
         /// Equivalent to adding a tokenized, indexed, termVectorStored, unstored,
         /// Lucene <see cref="Documents.Field"/>.
         /// Finally closes the token stream. Note that untokenized keywords can be added with this method via 
-        /// <see cref="KeywordTokenStream{T}(ICollection{T}"/>)"/>, the Lucene <c>KeywordTokenizer</c> or similar utilities.
+        /// <see cref="T:KeywordTokenStream{T}(ICollection{T}"/>)"/>, the Lucene <c>KeywordTokenizer</c> or similar utilities.
         /// 
         /// </summary>
         /// <param name="fieldName"> a name to be associated with the text </param>
@@ -381,7 +381,7 @@ namespace Lucene.Net.Index.Memory
         /// <param name="boost"> the boost factor for hits for this field </param>
         /// <param name="positionIncrementGap"> the position increment gap if fields with the same name are added more than once </param>
         /// <param name="offsetGap"> the offset gap if fields with the same name are added more than once </param>
-        /// <seealso cref="Documents.Field.Boost(float)"/>
+        /// <seealso cref="Documents.Field.Boost"/>
         public virtual void AddField(string fieldName, TokenStream stream, float boost, int positionIncrementGap, int offsetGap)
         {
             try


[19/27] lucenenet git commit: Lucene.Net.Core.Search.ConstantScoreQuery refactor: Changed from new List() to Collections.EmptyList()

Posted by ni...@apache.org.
Lucene.Net.Core.Search.ConstantScoreQuery refactor: Changed from new List<T>() to Collections.EmptyList<T>()


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/6d6df6c9
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/6d6df6c9
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/6d6df6c9

Branch: refs/heads/api-work
Commit: 6d6df6c9e0a5ab33a938b4fdd62311115b6ab843
Parents: 6c2b8de
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 17:50:14 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 17:50:14 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Search/ConstantScoreQuery.cs | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d6df6c9/src/Lucene.Net.Core/Search/ConstantScoreQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Search/ConstantScoreQuery.cs b/src/Lucene.Net.Core/Search/ConstantScoreQuery.cs
index 39cb98c..41f52c3 100644
--- a/src/Lucene.Net.Core/Search/ConstantScoreQuery.cs
+++ b/src/Lucene.Net.Core/Search/ConstantScoreQuery.cs
@@ -366,15 +366,11 @@ namespace Lucene.Net.Search
             {
                 if (outerInstance.m_query != null)
                 {
-                    //LUCENE TO-DO
-                    //return Collections.singletonList(new ChildScorer((Scorer)DocIdSetIterator, "constant"));
                     return new[] { new ChildScorer((Scorer)docIdSetIterator, "constant") };
                 }
                 else
                 {
-                    //LUCENE TO-DO
-                    return new List<ChildScorer>();
-                    //return Collections.emptyList();
+                    return Collections.EmptyList<ChildScorer>();
                 }
             }
         }


[06/27] lucenenet git commit: Lucene.Net.Highlighter: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Highlighter: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/27217597
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/27217597
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/27217597

Branch: refs/heads/api-work
Commit: 27217597c3678e62ab04609de50ae37c7885e2ba
Parents: 1b9fe40
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 12:26:47 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:49 2017 +0700

----------------------------------------------------------------------
 .../Highlight/WeightedSpanTermExtractor.cs        | 16 ++++++++--------
 .../PostingsHighlight/PassageScorer.cs            |  8 ++++----
 .../PostingsHighlight/PostingsHighlighter.cs      | 18 +++++++++---------
 .../VectorHighlight/FieldTermStack.cs             |  2 +-
 .../VectorHighlight/FragmentsBuilder.cs           |  8 ++++----
 5 files changed, 26 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/27217597/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
index 3de333f..c31005e 100644
--- a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs
@@ -56,7 +56,7 @@ namespace Lucene.Net.Search.Highlight
         }
 
         /// <summary>
-        /// Fills a <see cref="IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s using the terms from the supplied <paramref name="query"/>.
+        /// Fills a <see cref="T:IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s using the terms from the supplied <paramref name="query"/>.
         /// </summary>
         /// <param name="query"><see cref="Query"/> to extract Terms from</param>
         /// <param name="terms">Map to place created <see cref="WeightedSpanTerm"/>s in</param>
@@ -237,9 +237,9 @@ namespace Lucene.Net.Search.Highlight
         }
 
         /// <summary>
-        /// Fills a <see cref="IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s using the terms from the supplied <see cref="SpanQuery"/>.
+        /// Fills a <see cref="T:IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s using the terms from the supplied <see cref="SpanQuery"/>.
         /// </summary>
-        /// <param name="terms"><see cref="IDictionary{string, WeightedSpanTerm}"/> to place created <see cref="WeightedSpanTerm"/>s in</param>
+        /// <param name="terms"><see cref="T:IDictionary{string, WeightedSpanTerm}"/> to place created <see cref="WeightedSpanTerm"/>s in</param>
         /// <param name="spanQuery"><see cref="SpanQuery"/> to extract Terms from</param>
         /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         protected virtual void ExtractWeightedSpanTerms(IDictionary<string, WeightedSpanTerm> terms, SpanQuery spanQuery)
@@ -335,10 +335,10 @@ namespace Lucene.Net.Search.Highlight
         }
 
         /// <summary>
-        /// Fills a <see cref="IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s using the terms from 
+        /// Fills a <see cref="T:IDictionary{string, WeightedSpanTerm}"/> with <see cref="WeightedSpanTerm"/>s using the terms from 
         /// the supplied <see cref="Search.Spans.SpanQuery"/>.
         /// </summary>
-        /// <param name="terms"><see cref="IDictionary{string, WeightedSpanTerm}"/> to place created <see cref="WeightedSpanTerm"/>s in</param>
+        /// <param name="terms"><see cref="T:IDictionary{string, WeightedSpanTerm}"/> to place created <see cref="WeightedSpanTerm"/>s in</param>
         /// <param name="query"><see cref="Query"/> to extract Terms from</param>
         /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         protected virtual void ExtractWeightedTerms(IDictionary<string, WeightedSpanTerm> terms, Query query)
@@ -462,7 +462,7 @@ namespace Lucene.Net.Search.Highlight
         }
 
         /// <summary>
-        /// Creates an <see cref="IDictionary{string, WeightedSpanTerm}"/> from the given <see cref="Query"/> and <see cref="Analysis.TokenStream"/>.
+        /// Creates an <see cref="T:IDictionary{string, WeightedSpanTerm}"/> from the given <see cref="Query"/> and <see cref="Analysis.TokenStream"/>.
         /// </summary>
         /// <param name="query"><see cref="Query"/> that caused hit</param>
         /// <param name="tokenStream"><see cref="Analysis.TokenStream"/> of text to be highlighted</param>
@@ -475,7 +475,7 @@ namespace Lucene.Net.Search.Highlight
 
 
         /// <summary>
-        /// Creates an <see cref="IDictionary{string, WeightedSpanTerm}"/> from the given <see cref="Query"/> and <see cref="Analysis.TokenStream"/>.
+        /// Creates an <see cref="T:IDictionary{string, WeightedSpanTerm}"/> from the given <see cref="Query"/> and <see cref="Analysis.TokenStream"/>.
         /// </summary>
         /// <param name="query"><see cref="Query"/> that caused hit</param>
         /// <param name="tokenStream"><see cref="Analysis.TokenStream"/> of text to be highlighted</param>
@@ -509,7 +509,7 @@ namespace Lucene.Net.Search.Highlight
         }
 
         /// <summary>
-        /// Creates an <see cref="IDictionary{string, WeightedSpanTerm}"/> from the given <see cref="Query"/> and <see cref="Analysis.TokenStream"/>. Uses a supplied
+        /// Creates an <see cref="T:IDictionary{string, WeightedSpanTerm}"/> from the given <see cref="Query"/> and <see cref="Analysis.TokenStream"/>. Uses a supplied
         /// <see cref="IndexReader"/> to properly Weight terms (for gradient highlighting).
         /// </summary>
         /// <param name="query"><see cref="Query"/> that caused hit</param>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/27217597/src/Lucene.Net.Highlighter/PostingsHighlight/PassageScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PassageScorer.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PassageScorer.cs
index a1a4e35..af398da 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/PassageScorer.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PassageScorer.cs
@@ -23,7 +23,7 @@ namespace Lucene.Net.Search.PostingsHighlight
     /// Ranks passages found by <see cref="PostingsHighlighter"/>.
     /// <para/>
     /// Each passage is scored as a miniature document within the document.
-    /// The final score is computed as <c>norm</c> * &sum; (<c>weight</c> * <c>tf</c>).
+    /// The final score is computed as <c>norm</c> * \u2211 (<c>weight</c> * <c>tf</c>).
     /// The default implementation is <c>norm</c> * BM25.
     /// 
     /// @lucene.experimental
@@ -42,9 +42,9 @@ namespace Lucene.Net.Search.PostingsHighlight
         /// <summary>
         /// Creates <see cref="PassageScorer"/> with these default values:
         /// <list type="bullet">
-        ///     <item><paramref name="k1"/> = 1.2</item>
-        ///     <item><paramref name="b"/> = 0.75</item>
-        ///     <item><paramref name="pivot"/> = 87</item>
+        ///     <item><c>k1 = 1.2</c></item>
+        ///     <item><c>b = 0.75</c></item>
+        ///     <item><c>pivot = 87</c></item>
         /// </list>
         /// </summary>
         public PassageScorer()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/27217597/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
index db04ee1..758a33a 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
@@ -231,13 +231,13 @@ namespace Lucene.Net.Search.PostingsHighlight
         /// <param name="searcher">searcher that was previously used to execute the query.</param>
         /// <param name="topDocs">TopDocs containing the summary result documents to highlight.</param>
         /// <returns>
-        /// <see cref="IDictionary{string, string[]}"/> keyed on field name, containing the array of formatted snippets 
+        /// <see cref="T:IDictionary{string, string[]}"/> keyed on field name, containing the array of formatted snippets 
         /// corresponding to the documents in <paramref name="topDocs"/>.
         /// If no highlights were found for a document, the
         /// first sentence from the field will be returned.
         /// </returns>
         /// <exception cref="IOException">if an I/O error occurred during processing</exception>
-        /// <exception cref="ArgumentException">if <paramref name="field"/> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
+        /// <exception cref="ArgumentException">if <c>field</c> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
         public virtual IDictionary<string, string[]> HighlightFields(string[] fields, Query query, IndexSearcher searcher, TopDocs topDocs)
         {
             int[] maxPassages = new int[fields.Length];
@@ -264,14 +264,14 @@ namespace Lucene.Net.Search.PostingsHighlight
         /// <param name="topDocs">TopDocs containing the summary result documents to highlight.</param>
         /// <param name="maxPassages">The maximum number of top-N ranked passages per-field used to form the highlighted snippets.</param>
         /// <returns>
-        /// <see cref="IDictionary{string, string[]}"/> keyed on field name, containing the array of formatted snippets
+        /// <see cref="T:IDictionary{string, string[]}"/> keyed on field name, containing the array of formatted snippets
         /// corresponding to the documents in <paramref name="topDocs"/>.
         /// If no highlights were found for a document, the
         /// first <paramref name="maxPassages"/> sentences from the
         /// field will be returned.
         /// </returns>
         /// <exception cref="IOException">if an I/O error occurred during processing</exception>
-        /// <exception cref="ArgumentException">if <paramref name="field"/> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
+        /// <exception cref="ArgumentException">if <c>field</c> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
         public virtual IDictionary<string, string[]> HighlightFields(string[] fields, Query query, IndexSearcher searcher, TopDocs topDocs, int[] maxPassages)
         {
             ScoreDoc[] scoreDocs = topDocs.ScoreDocs;
@@ -294,14 +294,14 @@ namespace Lucene.Net.Search.PostingsHighlight
         /// <param name="docidsIn">containing the document IDs to highlight.</param>
         /// <param name="maxPassagesIn">The maximum number of top-N ranked passages per-field used to form the highlighted snippets.</param>
         /// <returns>
-        /// <see cref="IDictionary{string, string[]}"/> keyed on field name, containing the array of formatted snippets 
+        /// <see cref="F:IDictionary{string, string[]}"/> keyed on field name, containing the array of formatted snippets 
         /// corresponding to the documents in <paramref name="docidsIn"/>.
         /// If no highlights were found for a document, the
-        /// first <paramref name="maxPassages"/> from the field will
+        /// first <c>maxPassages</c> from the field will
         /// be returned.
         /// </returns>
         /// <exception cref="IOException">if an I/O error occurred during processing</exception>
-        /// <exception cref="ArgumentException">if <paramref name="field"/> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
+        /// <exception cref="ArgumentException">if <c>field</c> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
         public virtual IDictionary<string, string[]> HighlightFields(string[] fieldsIn, Query query, IndexSearcher searcher, int[] docidsIn, int[] maxPassagesIn)
         {
             IDictionary<string, string[]> snippets = new Dictionary<string, string[]>();
@@ -361,14 +361,14 @@ namespace Lucene.Net.Search.PostingsHighlight
         /// <param name="docidsIn">containing the document IDs to highlight.</param>
         /// <param name="maxPassagesIn">The maximum number of top-N ranked passages per-field used to form the highlighted snippets.</param>
         /// <returns>
-        /// <see cref="IDictionary{string, object[]}"/> keyed on field name, containing the array of formatted snippets
+        /// <see cref="T:IDictionary{string, object[]}"/> keyed on field name, containing the array of formatted snippets
         /// corresponding to the documents in <paramref name="docidsIn"/>.
         /// If no highlights were found for a document, the
         /// first <paramref name="maxPassagesIn"/> from the field will
         /// be returned.
         /// </returns>
         /// <exception cref="IOException">if an I/O error occurred during processing</exception>
-        /// <exception cref="ArgumentException">if <paramref name="field"/> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
+        /// <exception cref="ArgumentException">if <c>field</c> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
         protected internal virtual IDictionary<string, object[]> HighlightFieldsAsObjects(string[] fieldsIn, Query query, IndexSearcher searcher, int[] docidsIn, int[] maxPassagesIn)
         {
             if (fieldsIn.Length < 1)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/27217597/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
index 7b326f1..9b8b8e6 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs
@@ -65,7 +65,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// <param name="docId">document id to be highlighted</param>
         /// <param name="fieldName">field of the document to be highlighted</param>
         /// <param name="fieldQuery"><see cref="FieldQuery"/> object</param>
-        /// <exception cref="IOException">If there is a low-level I/O error</exception>
+        /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         public FieldTermStack(IndexReader reader, int docId, string fieldName, FieldQuery fieldQuery)
         {
             this.fieldName = fieldName;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/27217597/src/Lucene.Net.Highlighter/VectorHighlight/FragmentsBuilder.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FragmentsBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FragmentsBuilder.cs
index 343a060..534c9fb 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FragmentsBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FragmentsBuilder.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// <param name="fieldName">field of the document to be highlighted</param>
         /// <param name="fieldFragList"><see cref="FieldFragList"/> object</param>
         /// <returns>a created fragment or null when no fragment created</returns>
-        /// <exception cref="IOException">If there is a low-level I/O error</exception>
+        /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         string CreateFragment(IndexReader reader, int docId, string fieldName,
             FieldFragList fieldFragList);
 
@@ -51,7 +51,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// created fragments or null when no fragments created.
         /// size of the array can be less than <paramref name="maxNumFragments"/>
         /// </returns>
-        /// <exception cref="IOException">If there is a low-level I/O error</exception>
+        /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         string[] CreateFragments(IndexReader reader, int docId, string fieldName,
             FieldFragList fieldFragList, int maxNumFragments);
 
@@ -66,7 +66,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// <param name="postTags">post-tags to be used to highlight terms</param>
         /// <param name="encoder">an encoder that generates encoded text</param>
         /// <returns>a created fragment or null when no fragment created</returns>
-        /// <exception cref="IOException">If there is a low-level I/O error</exception>
+        /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         string CreateFragment(IndexReader reader, int docId, string fieldName,
             FieldFragList fieldFragList, string[] preTags, string[] postTags,
             IEncoder encoder);
@@ -86,7 +86,7 @@ namespace Lucene.Net.Search.VectorHighlight
         /// created fragments or null when no fragments created.
         /// size of the array can be less than <paramref name="maxNumFragments"/>
         /// </returns>
-        /// <exception cref="IOException">If there is a low-level I/O error</exception>
+        /// <exception cref="System.IO.IOException">If there is a low-level I/O error</exception>
         string[] CreateFragments(IndexReader reader, int docId, string fieldName,
             FieldFragList fieldFragList, int maxNumFragments, string[] preTags, string[] postTags,
             IEncoder encoder);


[04/27] lucenenet git commit: Lucene.Net.Facet: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Facet: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/638f2a11
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/638f2a11
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/638f2a11

Branch: refs/heads/api-work
Commit: 638f2a115196d11372402d313f8aa94843b50ef8
Parents: 9fb8cb1
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 11:50:42 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:47 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/DrillDownQuery.cs          |  2 +-
 src/Lucene.Net.Facet/DrillSideways.cs           |  2 +-
 src/Lucene.Net.Facet/FacetsCollector.cs         |  4 +--
 src/Lucene.Net.Facet/FacetsConfig.cs            |  4 +--
 .../Range/DoubleRangeFacetCounts.cs             |  4 +--
 .../Range/LongRangeFacetCounts.cs               |  4 +--
 .../Taxonomy/AssociationFacetField.cs           |  2 +-
 .../Taxonomy/CachedOrdinalsReader.cs            |  4 +--
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs   |  4 +--
 .../Directory/DirectoryTaxonomyReader.cs        |  8 ++---
 .../Directory/DirectoryTaxonomyWriter.cs        | 33 ++++++++++----------
 .../Taxonomy/DocValuesOrdinalsReader.cs         |  2 +-
 .../Taxonomy/FloatTaxonomyFacets.cs             |  2 +-
 .../Taxonomy/IntTaxonomyFacets.cs               |  2 +-
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs     |  6 ++--
 .../Taxonomy/SearcherTaxonomyManager.cs         |  2 +-
 .../Taxonomy/TaxonomyFacetSumValueSource.cs     |  2 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  4 +--
 src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs |  6 ++--
 .../WriterCache/CompactLabelToOrdinal.cs        | 15 ++++-----
 .../Taxonomy/WriterCache/TaxonomyWriterCache.cs |  2 +-
 21 files changed, 56 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/DrillDownQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index 5e7d4e5..7f93ff0 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Facet
     /// <para>
     /// <b>NOTE:</b> if you choose to create your own <see cref="Query"/> by calling
     /// <see cref="Term"/>, it is recommended to wrap it with <see cref="ConstantScoreQuery"/>
-    /// and set the <see cref="ConstantScoreQuery.Boost">boost</see> to <c>0.0f</c>,
+    /// and set the <see cref="Query.Boost">boost</see> to <c>0.0f</c>,
     /// so that it does not affect the scores of the documents.
     /// 
     /// @lucene.experimental

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/DrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 9930ddc..43791e3 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -273,7 +273,7 @@ namespace Lucene.Net.Facet
         public class DrillSidewaysResult
         {
             /// <summary>
-            /// Combined drill down & sideways results.
+            /// Combined drill down &amp; sideways results.
             /// </summary>
             public Facets Facets { get; private set; }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/FacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index df3d517..a6d5db1 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Facet
     /// Collects hits for subsequent faceting.  Once you've run
     /// a search and collect hits into this, instantiate one of
     /// the <see cref="ICollector"/> subclasses to do the facet
-    /// counting.  Use the <see cref="Search"/> utility methods to
+    /// counting.  Use the Search utility methods (such as <see cref="Search(IndexSearcher, Query, int, ICollector)"/>) to
     /// perform an "ordinary" search but also collect into a
     /// <see cref="Facets"/>. 
     /// </summary>
@@ -115,7 +115,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Create this; if <paramref name="keepScores"/> is <c>true</c> then a
-        /// <see cref="float[]"/> is allocated to hold score of all hits. 
+        /// <see cref="T:float[]"/> is allocated to hold score of all hits. 
         /// </summary>
         public FacetsCollector(bool keepScores)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index e6ecda8..1d5cb52 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -669,8 +669,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Turns an encoded string (from a previous call to <see cref="PathToString"/>) 
-        /// back into the original <see cref="string[]"/>. 
+        /// Turns an encoded string (from a previous call to <see cref="PathToString(string[])"/>) 
+        /// back into the original <see cref="T:string[]"/>. 
         /// </summary>
         public static string[] StringToPath(string s)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index c9d0dba..b5ba376 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Facet.Range
     /// <summary>
     /// <see cref="Facets"/> implementation that computes counts for
     ///  dynamic double ranges from a provided <see cref="ValueSource"/>, 
-    ///  using <see cref="FunctionValues.DoubleVal"/>.  Use
+    ///  using <see cref="FunctionValues.DoubleVal(int)"/> or <see cref="FunctionValues.DoubleVal(int, double[])"/>.  Use
     ///  this for dimensions that change in real-time (e.g. a
     ///  relative time based dimension like "Past day", "Past 2
     ///  days", etc.) or that change for each request (e.g.
@@ -74,7 +74,7 @@ namespace Lucene.Net.Facet.Range
         /// <see cref="ValueSource"/>, and using the provided Filter as
         /// a fastmatch: only documents passing the filter are
         /// checked for the matching ranges.  The filter must be
-        /// random access (implement <see cref="DocIdSet.GetBits()"/>). 
+        /// random access (implement <see cref="DocIdSet.Bits"/>). 
         /// </summary>
         public DoubleRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, Filter fastMatchFilter, DoubleRange[] ranges)
             : base(field, ranges, fastMatchFilter)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 4e82e94..7d5954e 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Facet.Range
     /// <summary>
     /// <see cref="Facets"/> implementation that computes counts for
     /// dynamic long ranges from a provided <see cref="ValueSource"/>,
-    /// using <see cref="FunctionValues.LongVal"/>.  Use
+    /// using <see cref="FunctionValues.LongVal(int)"/> or <see cref="FunctionValues.LongVal(int, long[])"/>.  Use
     /// this for dimensions that change in real-time (e.g. a
     /// relative time based dimension like "Past day", "Past 2
     /// days", etc.) or that change for each request (e.g. 
@@ -66,7 +66,7 @@ namespace Lucene.Net.Facet.Range
         /// <see cref="ValueSource"/>, and using the provided Filter as
         /// a fastmatch: only documents passing the filter are
         /// checked for the matching ranges.  The filter must be
-        /// random access (implement <see cref="DocIdSet.GetBits"/>). 
+        /// random access (implement <see cref="DocIdSet.Bits"/>). 
         /// </summary>
         public LongRangeFacetCounts(string field, ValueSource valueSource, 
             FacetsCollector hits, Filter fastMatchFilter, params LongRange[] ranges)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
index 66ee6ee..1e4731f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Add an instance of this to your <see cref="Document"/> to add
-    /// a facet label associated with an arbitrary <see cref="byte[]"/>.
+    /// a facet label associated with an arbitrary <see cref="T:byte[]"/>.
     /// This will require a custom <see cref="Facets"/>
     /// implementation at search time; see <see cref="IntAssociationFacetField"/> 
     /// and <see cref="FloatAssociationFacetField"/> to use existing 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index 31ca1a5..a63e5da 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// A per-segment cache of documents' facet ordinals. Every
-    /// <see cref="CachedOrds"/> holds the ordinals in a raw <see cref="int[]"/>, 
+    /// <see cref="CachedOrds"/> holds the ordinals in a raw <see cref="T:int[]"/>, 
     /// and therefore consumes as much RAM as the total
     /// number of ordinals found in the segment, but saves the
     /// CPU cost of decoding ordinals during facet counting.
@@ -123,7 +123,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Holds the cached ordinals in two parallel <see cref="int[]"/> arrays.
+        /// Holds the cached ordinals in two parallel <see cref="T:int[]"/> arrays.
         /// </summary>
         public sealed class CachedOrds : IAccountable
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 4810804..eca4372 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -173,7 +173,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Copies the path components to the given <see cref="char[]"/>, starting at index
+        /// Copies the path components to the given <see cref="T:char[]"/>, starting at index
         /// <paramref name="start"/>. <paramref name="delimiter"/> is copied between the path components.
         /// Returns the number of chars copied.
         /// 
@@ -287,7 +287,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Returns a string representation of the path, separating components with
         /// '/'.
         /// </summary>
-        /// <see cref= #toString(char) </seealso>
+        /// <seealso cref="ToString(char)"/>
         public override string ToString()
         {
             return ToString('/');

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index ee2eacd..2e71b8b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     /// <summary>
     /// A <see cref="TaxonomyReader"/> which retrieves stored taxonomy information from a
     /// <see cref="Directory"/>.
-    /// <P>
+    /// <para/>
     /// Reading from the on-disk index on every method call is too slow, so this
     /// implementation employs caching: Some methods cache recent requests and their
     /// results, while other methods prefetch all the data into memory and then
@@ -156,7 +156,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// any issues, unless the two instances continue to live. The reader
         /// guarantees that the two instances cannot affect each other in terms of
         /// correctness of the caches, however if the size of the cache is changed
-        /// through <see cref="CacheSize"/>, it will affect both reader instances.
+        /// through <see cref="SetCacheSize(int)"/>, it will affect both reader instances.
         /// </para>
         /// </summary>
         protected override TaxonomyReader DoOpenIfChanged()
@@ -378,11 +378,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// <see cref="SetCacheSize"/> controls the maximum allowed size of each of the caches
         /// used by <see cref="GetPath(int)"/> and <see cref="GetOrdinal(FacetLabel)"/>.
-        /// <P>
+        /// <para/>
         /// Currently, if the given size is smaller than the current size of
         /// a cache, it will not shrink, and rather we be limited to its current
         /// size. </summary>
-        /// <param name="value"> the new maximum cache size, in number of entries. </param>
+        /// <param name="size"> the new maximum cache size, in number of entries. </param>
         public virtual void SetCacheSize(int size)
         {
             EnsureOpen();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index a0e4c64..44bccb8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -132,11 +132,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Forcibly unlocks the taxonomy in the named directory.
-        /// <P>
+        /// <para/>
         /// Caution: this should only be used by failure recovery code, when it is
         /// known that no other process nor thread is in fact currently accessing
         /// this taxonomy.
-        /// <P>
+        /// <para/>
         /// This method is unnecessary if your <see cref="Store.Directory"/> uses a
         /// <see cref="NativeFSLockFactory"/> instead of the default
         /// <see cref="SimpleFSLockFactory"/>. When the "native" lock is used, a lock
@@ -250,16 +250,16 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Open internal index writer, which contains the taxonomy data.
-        /// <para>
+        /// <para/>
         /// Extensions may provide their own <see cref="IndexWriter"/> implementation or instance. 
-        /// <br><b>NOTE:</b> the instance this method returns will be disposed upon calling
+        /// <para/>
+        /// <b>NOTE:</b> the instance this method returns will be disposed upon calling
         /// to <see cref="Dispose()"/>.
-        /// <br><b>NOTE:</b> the merge policy in effect must not merge none adjacent segments. See
-        /// comment in <see cref="CreateIndexWriterConfig(IndexWriterConfig.OpenMode)"/> for the logic behind this.
-        ///  
-        /// </para>
+        /// <para/>
+        /// <b>NOTE:</b> the merge policy in effect must not merge non-adjacent segments. See
+        /// comment in <see cref="CreateIndexWriterConfig(OpenMode)"/> for the logic behind this.
         /// </summary>
-        /// <seealso cref="CreateIndexWriterConfig(IndexWriterConfig.OpenMode)"/>
+        /// <seealso cref="CreateIndexWriterConfig(OpenMode)"/>
         /// <param name="directory">
         ///          the <see cref="Store.Directory"/> on top of which an <see cref="IndexWriter"/>
         ///          should be opened. </param>
@@ -272,11 +272,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Create the <see cref="IndexWriterConfig"/> that would be used for opening the internal index writer.
-        /// <br>Extensions can configure the <see cref="IndexWriter"/> as they see fit,
+        /// <para/>
+        /// Extensions can configure the <see cref="IndexWriter"/> as they see fit,
         /// including setting a <see cref="Index.MergeScheduler"/>, or
         /// <see cref="Index.IndexDeletionPolicy"/>, different RAM size
-        /// etc.<br>
-        /// <br><b>NOTE:</b> internal docids of the configured index must not be altered.
+        /// etc.
+        /// <para/>
+        /// <b>NOTE:</b> internal docids of the configured index must not be altered.
         /// For that, categories are never deleted from the taxonomy index.
         /// In addition, merge policy in effect must not merge non-adjacent segments.
         /// </summary>
@@ -937,12 +939,12 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// Mapping from old ordinal to new ordinals, used when merging indexes 
         /// with separate taxonomies.
-        /// <para> 
+        /// <para/> 
         /// <see cref="AddMapping"/> merges one or more taxonomies into the given taxonomy
         /// (this). An <see cref="IOrdinalMap"/> is filled for each of the added taxonomies,
         /// containing the new ordinal (in the merged taxonomy) of each of the
         /// categories in the old taxonomy.
-        /// <P>  
+        /// <para/>  
         /// There exist two implementations of <see cref="IOrdinalMap"/>: <see cref="MemoryOrdinalMap"/> and
         /// <see cref="DiskOrdinalMap"/>. As their names suggest, the former keeps the map in
         /// memory and the latter in a temporary disk file. Because these maps will
@@ -950,14 +952,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// same time, it is recommended to put the first taxonomy's map in memory,
         /// and all the rest on disk (later to be automatically read into memory one
         /// by one, when needed).
-        /// </para>
         /// </summary>
         public interface IOrdinalMap
         {
             /// <summary>
             /// Set the size of the map. This MUST be called before <see cref="AddMapping"/>.
             /// It is assumed (but not verified) that <see cref="AddMapping"/> will then be
-            /// called exactly 'size' times, with different <paramref name="origOrdinals"/> between 0
+            /// called exactly 'size' times, with different <c>origOrdinals</c> between 0
             /// and size - 1.  
             /// </summary>
             void SetSize(int taxonomySize);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
index b48eafd..cc8137c 100644
--- a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
@@ -89,7 +89,7 @@
         }
 
         /// <summary>
-        /// Subclass & override if you change the encoding.
+        /// Subclass &amp; override if you change the encoding.
         /// </summary>
         protected virtual void Decode(BytesRef buf, IntsRef ordinals)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index 4304646..608300b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Base class for all taxonomy-based facets that aggregate
-    /// to a per-ords <see cref="float[]"/>. 
+    /// to a per-ords <see cref="T:float[]"/>. 
     /// </summary>
     public abstract class FloatTaxonomyFacets : TaxonomyFacets
     {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index fc279c5..2b223e6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Base class for all taxonomy-based facets that aggregate
-    /// to a per-ords <see cref="int[]"/>. 
+    /// to a per-ords <see cref="T:int[]"/>. 
     /// </summary>
 
     public abstract class IntTaxonomyFacets : TaxonomyFacets

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index 956922f..5034030 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -23,15 +23,15 @@ namespace Lucene.Net.Facet.Taxonomy
      */
 
     /// <summary>
-    /// <see cref="LRUHashMap{TKey, TValue}"/> is similar to of Java's HashMap, which has a bounded <see cref="Capacity"/>;
-    /// When it reaches that <see cref="Capacity"/>, each time a new element is added, the least
+    /// <see cref="LRUHashMap{TKey, TValue}"/> is similar to Java's HashMap, which has a bounded <see cref="Limit"/>;
+    /// When it reaches that <see cref="Limit"/>, each time a new element is added, the least
     /// recently used (LRU) entry is removed.
     /// <para>
     /// Unlike the Java Lucene implementation, this one is thread safe because it is backed by the <see cref="LurchTable{TKey, TValue}"/>.
     /// Do note that every time an element is read from <see cref="LRUHashMap{TKey, TValue}"/>,
     /// a write operation also takes place to update the element's last access time.
     /// This is because the LRU order needs to be remembered to determine which element
-    /// to evict when the <see cref="Capacity"/> is exceeded. 
+    /// to evict when the <see cref="Limit"/> is exceeded. 
     /// </para>
     /// <para>
     /// 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index 8fb75d8..24ca87b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -96,7 +96,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// 
         /// <para>
         /// <b>NOTE:</b> you should only use this constructor if you commit and call
-        /// <see cref="Index.ReaderManager.MaybeRefresh()"/> in the same thread. Otherwise it could lead to an
+        /// <see cref="Search.ReferenceManager{G}.MaybeRefresh()"/> (on the <see cref="Index.ReaderManager"/>) in the same thread. Otherwise it could lead to an
         /// unsync'd <see cref="IndexSearcher"/> and <see cref="TaxonomyReader"/> pair.
         /// </para>
         /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index b777f52..eee089d 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Facet.Taxonomy
     using Weight = Lucene.Net.Search.Weight;
 
     /// <summary>
-    /// Aggregates sum of values from <see cref="FunctionValues.DoubleVal"/>, 
+    /// Aggregates sum of values from <see cref="FunctionValues.DoubleVal(int)"/> and <see cref="FunctionValues.DoubleVal(int, double[])"/>, 
     /// for each facet label.
     /// 
     ///  @lucene.experimental 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index 4cf132d..b02d5f2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -156,11 +156,11 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Implements the actual opening of a new <see cref="TaxonomyReader"/> instance if
         /// the taxonomy has changed.
         /// </summary>
-        /// <see cref= #openIfChanged(TaxonomyReader) </seealso>
+        /// <seealso cref="OpenIfChanged{T}(T)"/>
         protected abstract TaxonomyReader DoOpenIfChanged();
 
         /// <summary>
-        /// Throws <see cref="AlreadyClosedException"/> if this <see cref="IndexReader"/> is disposed
+        /// Throws <see cref="AlreadyClosedException"/> if this <see cref="Index.IndexReader"/> is disposed
         /// </summary>
         protected void EnsureOpen()
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index 29e1ce8..8988b14 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <see cref="AddCategory"/> adds a category with a given path name to the taxonomy,
         /// and returns its ordinal. If the category was already present in
         /// the taxonomy, its existing ordinal is returned.
-        /// <P>
+        /// <para/>
         /// Before adding a category, <see cref="AddCategory"/> makes sure that all its
         /// ancestor categories exist in the taxonomy as well. As result, the
         /// ordinal of a category is guaranteed to be smaller then the ordinal of
@@ -100,10 +100,10 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// <see cref="Count"/> returns the number of categories in the taxonomy.
-        /// <P>
+        /// <para/>
         /// Because categories are numbered consecutively starting with 0, it
         /// means the taxonomy contains ordinals 0 through <see cref="Count"/>-1.
-        /// <P>
+        /// <para/>
         /// Note that the number returned by <see cref="Count"/> is often slightly higher
         /// than the number of categories inserted into the taxonomy; This is
         /// because when a category is added to the taxonomy, its ancestors

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 01f712a..0387063 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -31,13 +31,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// </para>
     /// <para>
     /// This data structure grows by adding a new HashArray whenever the number of
-    /// collisions in the <see cref="CollisionMap"/> exceeds <see cref="loadFactor"/> * 
-    /// <see cref="GetMaxOrdinal()"/>. Growing also includes reinserting all colliding
+    /// collisions in the <see cref="CollisionMap"/> exceeds <see cref="loadFactor"/> *
+    /// <c>GetMaxOrdinal()</c>. Growing also includes reinserting all colliding
     /// labels into the <see cref="HashArray"/>s to possibly reduce the number of collisions.
     /// 
     /// For setting the <see cref="loadFactor"/> see 
     /// <see cref="CompactLabelToOrdinal(int, float, int)"/>. 
-    /// 
     /// </para>
     /// <para>
     /// This data structure has a much lower memory footprint (~30%) compared to a
@@ -396,12 +395,10 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// </summary>
         internal static CompactLabelToOrdinal Open(FileInfo file, float loadFactor, int numHashArrays)
         {
-            /// <summary>
-            /// Part of the file is the labelRepository, which needs to be rehashed
-            /// and label offsets re-added to the object. I am unsure as to why we
-            /// can't just store these off in the file as well, but in keeping with
-            /// the spirit of the original code, I did it this way. (ssuppe)
-            /// </summary>
+            // Part of the file is the labelRepository, which needs to be rehashed
+            // and label offsets re-added to the object. I am unsure as to why we
+            // can't just store these off in the file as well, but in keeping with
+            // the spirit of the original code, I did it this way. (ssuppe)
             CompactLabelToOrdinal l2o = new CompactLabelToOrdinal();
             l2o.loadFactor = loadFactor;
             l2o.hashArrays = new HashArray[numHashArrays];

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index 127e752..1f7e712 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -92,7 +92,7 @@
         bool IsFull { get; }
 
         /// <summary>
-        /// Clears the content of the cache. Unlike <see cref="Dispose()"/>, the caller can
+        /// Clears the content of the cache. Unlike <see cref="IDisposable.Dispose()"/>, the caller can
         /// assume that the cache is still operable after this method returns.
         /// </summary>
         void Clear();


[24/27] lucenenet git commit: Lucene.Net.Core.Util.ArrayUtil: Added CLSCompliant(false) attribute to Grow and Shrink overloads that accept jagged arrays. Changed (unused) GetHashCode(sbyte[], int, int) method to GetHashCode(byte[], int, int) to make it CLS compliant.

Posted by ni...@apache.org.
Lucene.Net.Core.Util.ArrayUtil: Added CLSCompliant(false) attribute to Grow and Shrink overloads that accept jagged arrays. Changed (unused) GetHashCode(sbyte[], int, int) method to GetHashCode(byte[], int, int) to make it CLS compliant.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/29e65313
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/29e65313
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/29e65313

Branch: refs/heads/api-work
Commit: 29e65313d965dba5a89ecdbb3b7aa63f97b6c439
Parents: 0c711c8
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 19:02:54 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 19:02:54 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/ArrayUtil.cs | 23 +++++++++++++++--------
 1 file changed, 15 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/29e65313/src/Lucene.Net.Core/Util/ArrayUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/ArrayUtil.cs b/src/Lucene.Net.Core/Util/ArrayUtil.cs
index 1ca217c..2df630e 100644
--- a/src/Lucene.Net.Core/Util/ArrayUtil.cs
+++ b/src/Lucene.Net.Core/Util/ArrayUtil.cs
@@ -414,7 +414,8 @@ namespace Lucene.Net.Util
             }
         }
 
-        public static sbyte[] Grow(sbyte[] array, int minSize) // LUCENENET TODO: remove this overload, mark it non-CLS compliant, or mark internal
+        [CLSCompliant(false)]
+        public static sbyte[] Grow(sbyte[] array, int minSize)
         {
             Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?");
             if (array.Length < minSize)
@@ -537,7 +538,8 @@ namespace Lucene.Net.Util
             }
         }
 
-        public static int[][] Grow(int[][] array, int minSize) // LUCENENET TODO: CLS compliance issue
+        [CLSCompliant(false)]
+        public static int[][] Grow(int[][] array, int minSize)
         {
             Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?");
             if (array.Length < minSize)
@@ -552,12 +554,14 @@ namespace Lucene.Net.Util
             }
         }
 
-        public static int[][] Grow(int[][] array) // LUCENENET TODO: CLS compliance issue
+        [CLSCompliant(false)]
+        public static int[][] Grow(int[][] array)
         {
             return Grow(array, 1 + array.Length);
         }
 
-        public static int[][] Shrink(int[][] array, int targetSize) // LUCENENET TODO: CLS compliance issue
+        [CLSCompliant(false)]
+        public static int[][] Shrink(int[][] array, int targetSize)
         {
             Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?");
             int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
@@ -573,7 +577,8 @@ namespace Lucene.Net.Util
             }
         }
 
-        public static float[][] Grow(float[][] array, int minSize) // LUCENENET TODO: CLS compliance issue
+        [CLSCompliant(false)]
+        public static float[][] Grow(float[][] array, int minSize)
         {
             Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?");
             if (array.Length < minSize)
@@ -588,12 +593,14 @@ namespace Lucene.Net.Util
             }
         }
 
-        public static float[][] Grow(float[][] array) // LUCENENET TODO: CLS compliance issue
+        [CLSCompliant(false)]
+        public static float[][] Grow(float[][] array)
         {
             return Grow(array, 1 + array.Length);
         }
 
-        public static float[][] Shrink(float[][] array, int targetSize) // LUCENENET TODO: CLS compliance issue
+        [CLSCompliant(false)]
+        public static float[][] Shrink(float[][] array, int targetSize)
         {
             Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?");
             int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
@@ -627,7 +634,7 @@ namespace Lucene.Net.Util
         /// Returns hash of bytes in range start (inclusive) to
         /// end (inclusive)
         /// </summary>
-        public static int GetHashCode(sbyte[] array, int start, int end) // LUCENENET TODO: chnage to byte ?
+        public static int GetHashCode(byte[] array, int start, int end)
         {
             int code = 0;
             for (int i = end - 1; i >= start; i--)


[13/27] lucenenet git commit: Lucene.Net.Spatial: fix documentation comment formatting problems

Posted by ni...@apache.org.
Lucene.Net.Spatial: fix documentation comment formatting problems


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/c95f6ab9
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/c95f6ab9
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/c95f6ab9

Branch: refs/heads/api-work
Commit: c95f6ab9900eca772062f52785fd84cad5d2955d
Parents: a538f19
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 13:31:15 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:54 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Spatial/DisjointSpatialFilter.cs                  | 2 +-
 .../Prefix/AbstractVisitingPrefixTreeFilter.cs                   | 4 ++--
 src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs        | 2 +-
 src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs      | 2 +-
 src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs              | 4 ++--
 src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs          | 2 +-
 src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs          | 4 ++--
 src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs                | 2 +-
 src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs             | 2 +-
 9 files changed, 12 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/DisjointSpatialFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/DisjointSpatialFilter.cs b/src/Lucene.Net.Spatial/DisjointSpatialFilter.cs
index 18baece..13512c8 100644
--- a/src/Lucene.Net.Spatial/DisjointSpatialFilter.cs
+++ b/src/Lucene.Net.Spatial/DisjointSpatialFilter.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Spatial
         /// <param name="args">Used in spatial intersection</param>
         /// <param name="field">
         /// This field is used to determine which docs have spatial data via
-        /// <see cref="FieldCache.GetDocsWithField(AtomicReader, string)"/>.
+        /// <see cref="IFieldCache.GetDocsWithField(AtomicReader, string)"/>.
         /// Passing null will assume all docs have spatial data.
         /// </param>
         public DisjointSpatialFilter(SpatialStrategy strategy, SpatialArgs args, string field)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
index d4a58e9..5d507b9 100644
--- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
@@ -28,7 +28,7 @@ namespace Lucene.Net.Spatial.Prefix
      */
 
     /// <summary>
-    /// Traverses a <see cref="SpatialPrefixTree">SpatialPrefixTree</see> indexed field, using the template &
+    /// Traverses a <see cref="SpatialPrefixTree">SpatialPrefixTree</see> indexed field, using the template &amp;
     /// visitor design patterns for subclasses to guide the traversal and collect
     /// matching documents.
     /// <para/>
@@ -299,7 +299,7 @@ namespace Lucene.Net.Spatial.Prefix
             }
 
             /// <summary>
-            /// Called when doing a divide & conquer to find the next intersecting cells
+            /// Called when doing a divide &amp; conquer to find the next intersecting cells
             /// of the query shape that are beneath <paramref name="cell"/>. <paramref name="cell"/> is
             /// guaranteed to have an intersection and thus this must return some number
             /// of nodes.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
index bac5e28..7cfa526 100644
--- a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.Spatial.Prefix
      */
 
     /// <summary>
-    /// Finds docs where its indexed shape <see cref="Queries.SpatialOperation.CONTAINS"/>
+    /// Finds docs where its indexed shape <see cref="Queries.SpatialOperation.Contains"/>
     /// the query shape. For use on <see cref="RecursivePrefixTreeStrategy"/>.
     /// 
     /// @lucene.experimental

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
index 7d54c81..fe449a7 100644
--- a/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Spatial.Prefix
      */
 
     /// <summary>
-    /// A Filter matching documents that have an <see cref="SpatialRelation.Intersects"/>
+    /// A Filter matching documents that have an <see cref="SpatialRelation.INTERSECTS"/>
     /// (i.e. not DISTINCT) relationship with a provided query shape.
     /// 
     /// @lucene.internal

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs b/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs
index 10b6cc0..61ed934 100644
--- a/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs
+++ b/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs
@@ -69,7 +69,7 @@ namespace Lucene.Net.Spatial.Prefix
     /// does most of the work, for example returning
     /// a list of terms representing grids of various sizes for a supplied shape.
     /// An important
-    /// configuration item is <see cref="SetDistErrPct(double)"/> which balances
+    /// configuration item is <see cref="DistErrPct"/> which balances
     /// shape precision against scalability.  See those docs.
     /// 
     /// @lucene.internal
@@ -114,7 +114,7 @@ namespace Lucene.Net.Spatial.Prefix
         /// maximum precision (<see cref="Lucene.Net.Spatial.Prefix.Tree.SpatialPrefixTree.MaxLevels"/>);
         /// this applies to all other shapes. Specific shapes at index and query time
         /// can use something different than this default value.  If you don't set a
-        /// default then the default is <see cref="SpatialArgs.DefaultDisterrpct"/> --
+        /// default then the default is <see cref="SpatialArgs.DEFAULT_DISTERRPCT"/> --
         /// 2.5%.
         /// </remarks>
         /// <seealso cref="Lucene.Net.Spatial.Queries.SpatialArgs.DistErrPct"/>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
index 99f2958..8bf1542 100644
--- a/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
+++ b/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs
@@ -69,7 +69,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
             }
         }
 
-        /// <summary>Any more than this and there's no point (double lat & lon are the same).</summary>
+        /// <summary>Any more than this and there's no point (double lat &amp; lon are the same).</summary>
         public static int MaxLevelsPossible
         {
             get { return GeohashUtils.MAX_PRECISION; }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
index 4922b6e..b09cb54 100644
--- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
+++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs
@@ -172,7 +172,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
         /// leaf and none of its children are added.
         /// <para/>
         /// This implementation checks if shape is a <see cref="IPoint"/> and if so returns
-        /// <see cref="GetCells(Point, int, bool)"/>.
+        /// <see cref="GetCells(IPoint, int, bool)"/>.
         /// </remarks>
         /// <param name="shape">the shape; non-null</param>
         /// <param name="detailLevel">the maximum detail level to get cells for</param>
@@ -260,7 +260,7 @@ namespace Lucene.Net.Spatial.Prefix.Tree
 
         /// <summary>
         /// A Point-optimized implementation of
-        /// <see cref="GetCells(Shape, int, bool, bool)"/>. That
+        /// <see cref="GetCells(IShape, int, bool, bool)"/>. That
         /// method in facts calls this for points.
         /// <para/>
         /// This implementation depends on <see cref="GetCell(string)"/> being fast, as its

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
index 6a1a3d8..6254ab7 100644
--- a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
+++ b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Spatial.Queries
     /// <summary>
     /// Parses a string that usually looks like "OPERATION(SHAPE)" into a <see cref="SpatialArgs"/>
     /// object. The set of operations supported are defined in <see cref="SpatialOperation"/>, such
-    /// as "Intersects" being a common one. The shape portion is defined by WKT <see cref="Spatial4n.Core.Io.WktShapeParser"/>,
+    /// as "Intersects" being a common one. The shape portion is defined by WKT <see cref="Spatial4n.Core.IO.WktShapeParser"/>,
     /// but it can be overridden/customized via <see cref="ParseShape(string, SpatialContext)"/>.
     /// There are some optional name-value pair parameters that follow the closing parenthesis.  Example:
     /// <code>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c95f6ab9/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs b/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs
index b33d346..e8a0e6a 100644
--- a/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs
+++ b/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Spatial.Vector
     /// 
     /// <h4>Implementation:</h4>
     /// This is a simple Strategy.  Search works with <see cref="NumericRangeQuery"/>s on
-    /// an x & y pair of fields.  A Circle query does the same bbox query but adds a
+    /// an x &amp; y pair of fields.  A Circle query does the same bbox query but adds a
     /// ValueSource filter on <see cref="SpatialStrategy.MakeDistanceValueSource(IPoint)"/>.
     /// <para/>
     /// One performance shortcoming with this strategy is that a scenario involving


[15/27] lucenenet git commit: Lucene.Net.Core.Index.IndexWriter refactor: GetNextMerge() > NextMerge() (for consistency with iterators)

Posted by ni...@apache.org.
Lucene.Net.Core.Index.IndexWriter refactor: GetNextMerge() > NextMerge() (for consistency with iterators)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/f3839209
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/f3839209
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/f3839209

Branch: refs/heads/api-work
Commit: f38392096c9597f7e23b66340ebc41309389670e
Parents: 1ace780
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 14:39:54 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:56 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Index/ConcurrentMergeScheduler.cs         | 4 ++--
 src/Lucene.Net.Core/Index/IndexWriter.cs                      | 2 +-
 src/Lucene.Net.Core/Index/SerialMergeScheduler.cs             | 2 +-
 src/Lucene.Net.Core/Index/TaskMergeScheduler.cs               | 4 ++--
 src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs | 2 +-
 src/Lucene.Net.Tests/core/Index/TestIndexWriterMerging.cs     | 2 +-
 src/Lucene.Net.Tests/core/TestMergeSchedulerExternal.cs       | 2 +-
 7 files changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.Core/Index/ConcurrentMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net.Core/Index/ConcurrentMergeScheduler.cs
index 5026907..28164cd 100644
--- a/src/Lucene.Net.Core/Index/ConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net.Core/Index/ConcurrentMergeScheduler.cs
@@ -473,7 +473,7 @@ namespace Lucene.Net.Index
                         }
                     }
 
-                    MergePolicy.OneMerge merge = writer.GetNextMerge();
+                    MergePolicy.OneMerge merge = writer.NextMerge();
                     if (merge == null)
                     {
                         if (IsVerbose)
@@ -651,7 +651,7 @@ namespace Lucene.Net.Index
 
                         // Subsequent times through the loop we do any new
                         // merge that writer says is necessary:
-                        merge = tWriter.GetNextMerge();
+                        merge = tWriter.NextMerge();
 
                         // Notify here in case any threads were stalled;
                         // they will notice that the pending merge has

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.Core/Index/IndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/IndexWriter.cs b/src/Lucene.Net.Core/Index/IndexWriter.cs
index d929cf1..0897287 100644
--- a/src/Lucene.Net.Core/Index/IndexWriter.cs
+++ b/src/Lucene.Net.Core/Index/IndexWriter.cs
@@ -2451,7 +2451,7 @@ namespace Lucene.Net.Index
         ///
         /// @lucene.experimental
         /// </summary>
-        public virtual MergePolicy.OneMerge GetNextMerge() // LUCENENET TODO: Rename NextMerge() (consistency with iterator.Next())
+        public virtual MergePolicy.OneMerge NextMerge()
         {
             lock (this)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.Core/Index/SerialMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/SerialMergeScheduler.cs b/src/Lucene.Net.Core/Index/SerialMergeScheduler.cs
index 7a7b664..06b0d79 100644
--- a/src/Lucene.Net.Core/Index/SerialMergeScheduler.cs
+++ b/src/Lucene.Net.Core/Index/SerialMergeScheduler.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Index
             {
                 while (true)
                 {
-                    MergePolicy.OneMerge merge = writer.GetNextMerge();
+                    MergePolicy.OneMerge merge = writer.NextMerge();
                     if (merge == null)
                     {
                         break;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.Core/Index/TaskMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/TaskMergeScheduler.cs b/src/Lucene.Net.Core/Index/TaskMergeScheduler.cs
index 804e7bb..36d0e9b 100644
--- a/src/Lucene.Net.Core/Index/TaskMergeScheduler.cs
+++ b/src/Lucene.Net.Core/Index/TaskMergeScheduler.cs
@@ -275,7 +275,7 @@ namespace Lucene.Net.Index
                         }
                     }
 
-                    MergePolicy.OneMerge merge = writer.GetNextMerge();
+                    MergePolicy.OneMerge merge = writer.NextMerge();
                     if (merge == null)
                     {
                         if (Verbose)
@@ -583,7 +583,7 @@ namespace Lucene.Net.Index
 
                         // Subsequent times through the loop we do any new
                         // merge that writer says is necessary:
-                        merge = _writer.GetNextMerge();
+                        merge = _writer.NextMerge();
 
                         // Notify here in case any threads were stalled;
                         // they will notice that the pending merge has

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
index ee2e7e0..5da390d 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Index
             {
                 lock (this)
                 {
-                    if (!MayMerge.Get() && writer.GetNextMerge() != null)
+                    if (!MayMerge.Get() && writer.NextMerge() != null)
                     {
                         throw new InvalidOperationException();
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.Tests/core/Index/TestIndexWriterMerging.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/core/Index/TestIndexWriterMerging.cs
index 879418b..37fc3cd 100644
--- a/src/Lucene.Net.Tests/core/Index/TestIndexWriterMerging.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestIndexWriterMerging.cs
@@ -306,7 +306,7 @@ namespace Lucene.Net.Index
                 {
                     while (true)
                     {
-                        MergePolicy.OneMerge merge = writer.GetNextMerge();
+                        MergePolicy.OneMerge merge = writer.NextMerge();
                         if (merge == null)
                         {
                             break;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3839209/src/Lucene.Net.Tests/core/TestMergeSchedulerExternal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/TestMergeSchedulerExternal.cs b/src/Lucene.Net.Tests/core/TestMergeSchedulerExternal.cs
index 5e10c04..b6a8a79 100644
--- a/src/Lucene.Net.Tests/core/TestMergeSchedulerExternal.cs
+++ b/src/Lucene.Net.Tests/core/TestMergeSchedulerExternal.cs
@@ -139,7 +139,7 @@ namespace Lucene.Net
             public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
             {
                 MergePolicy.OneMerge merge = null;
-                while ((merge = writer.GetNextMerge()) != null)
+                while ((merge = writer.NextMerge()) != null)
                 {
                     if (VERBOSE)
                     {