You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/05 16:51:39 UTC
[04/27] lucenenet git commit: Lucene.Net.Facet: fix documentation
comment formatting problems
Lucene.Net.Facet: fix documentation comment formatting problems
Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/638f2a11
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/638f2a11
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/638f2a11
Branch: refs/heads/api-work
Commit: 638f2a115196d11372402d313f8aa94843b50ef8
Parents: 9fb8cb1
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 5 11:50:42 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Feb 5 14:47:47 2017 +0700
----------------------------------------------------------------------
src/Lucene.Net.Facet/DrillDownQuery.cs | 2 +-
src/Lucene.Net.Facet/DrillSideways.cs | 2 +-
src/Lucene.Net.Facet/FacetsCollector.cs | 4 +--
src/Lucene.Net.Facet/FacetsConfig.cs | 4 +--
.../Range/DoubleRangeFacetCounts.cs | 4 +--
.../Range/LongRangeFacetCounts.cs | 4 +--
.../Taxonomy/AssociationFacetField.cs | 2 +-
.../Taxonomy/CachedOrdinalsReader.cs | 4 +--
src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs | 4 +--
.../Directory/DirectoryTaxonomyReader.cs | 8 ++---
.../Directory/DirectoryTaxonomyWriter.cs | 33 ++++++++++----------
.../Taxonomy/DocValuesOrdinalsReader.cs | 2 +-
.../Taxonomy/FloatTaxonomyFacets.cs | 2 +-
.../Taxonomy/IntTaxonomyFacets.cs | 2 +-
src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs | 6 ++--
.../Taxonomy/SearcherTaxonomyManager.cs | 2 +-
.../Taxonomy/TaxonomyFacetSumValueSource.cs | 2 +-
src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs | 4 +--
src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs | 6 ++--
.../WriterCache/CompactLabelToOrdinal.cs | 15 ++++-----
.../Taxonomy/WriterCache/TaxonomyWriterCache.cs | 2 +-
21 files changed, 56 insertions(+), 58 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/DrillDownQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index 5e7d4e5..7f93ff0 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -42,7 +42,7 @@ namespace Lucene.Net.Facet
/// <para>
/// <b>NOTE:</b> if you choose to create your own <see cref="Query"/> by calling
/// <see cref="Term"/>, it is recommended to wrap it with <see cref="ConstantScoreQuery"/>
- /// and set the <see cref="ConstantScoreQuery.Boost">boost</see> to <c>0.0f</c>,
+ /// and set the <see cref="Query.Boost">boost</see> to <c>0.0f</c>,
/// so that it does not affect the scores of the documents.
///
/// @lucene.experimental
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/DrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 9930ddc..43791e3 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -273,7 +273,7 @@ namespace Lucene.Net.Facet
public class DrillSidewaysResult
{
/// <summary>
- /// Combined drill down & sideways results.
+ /// Combined drill down & sideways results.
/// </summary>
public Facets Facets { get; private set; }
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/FacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index df3d517..a6d5db1 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Facet
/// Collects hits for subsequent faceting. Once you've run
/// a search and collect hits into this, instantiate one of
/// the <see cref="ICollector"/> subclasses to do the facet
- /// counting. Use the <see cref="Search"/> utility methods to
+ /// counting. Use the Search utility methods (such as <see cref="Search(IndexSearcher, Query, int, ICollector)"/>) to
/// perform an "ordinary" search but also collect into a
/// <see cref="Facets"/>.
/// </summary>
@@ -115,7 +115,7 @@ namespace Lucene.Net.Facet
/// <summary>
/// Create this; if <paramref name="keepScores"/> is <c>true</c> then a
- /// <see cref="float[]"/> is allocated to hold score of all hits.
+ /// <see cref="T:float[]"/> is allocated to hold score of all hits.
/// </summary>
public FacetsCollector(bool keepScores)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index e6ecda8..1d5cb52 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -669,8 +669,8 @@ namespace Lucene.Net.Facet
}
/// <summary>
- /// Turns an encoded string (from a previous call to <see cref="PathToString"/>)
- /// back into the original <see cref="string[]"/>.
+ /// Turns an encoded string (from a previous call to <see cref="PathToString(string[])"/>)
+ /// back into the original <see cref="T:string[]"/>.
/// </summary>
public static string[] StringToPath(string s)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index c9d0dba..b5ba376 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Facet.Range
/// <summary>
/// <see cref="Facets"/> implementation that computes counts for
/// dynamic double ranges from a provided <see cref="ValueSource"/>,
- /// using <see cref="FunctionValues.DoubleVal"/>. Use
+ /// using <see cref="FunctionValues.DoubleVal(int)"/> or <see cref="FunctionValues.DoubleVal(int, double[])"/>. Use
/// this for dimensions that change in real-time (e.g. a
/// relative time based dimension like "Past day", "Past 2
/// days", etc.) or that change for each request (e.g.
@@ -74,7 +74,7 @@ namespace Lucene.Net.Facet.Range
/// <see cref="ValueSource"/>, and using the provided Filter as
/// a fastmatch: only documents passing the filter are
/// checked for the matching ranges. The filter must be
- /// random access (implement <see cref="DocIdSet.GetBits()"/>).
+ /// random access (implement <see cref="DocIdSet.Bits"/>).
/// </summary>
public DoubleRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, Filter fastMatchFilter, DoubleRange[] ranges)
: base(field, ranges, fastMatchFilter)
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 4e82e94..7d5954e 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Facet.Range
/// <summary>
/// <see cref="Facets"/> implementation that computes counts for
/// dynamic long ranges from a provided <see cref="ValueSource"/>,
- /// using <see cref="FunctionValues.LongVal"/>. Use
+ /// using <see cref="FunctionValues.LongVal(int)"/> or <see cref="FunctionValues.LongVal(int, long[])"/>. Use
/// this for dimensions that change in real-time (e.g. a
/// relative time based dimension like "Past day", "Past 2
/// days", etc.) or that change for each request (e.g.
@@ -66,7 +66,7 @@ namespace Lucene.Net.Facet.Range
/// <see cref="ValueSource"/>, and using the provided Filter as
/// a fastmatch: only documents passing the filter are
/// checked for the matching ranges. The filter must be
- /// random access (implement <see cref="DocIdSet.GetBits"/>).
+ /// random access (implement <see cref="DocIdSet.Bits"/>).
/// </summary>
public LongRangeFacetCounts(string field, ValueSource valueSource,
FacetsCollector hits, Filter fastMatchFilter, params LongRange[] ranges)
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
index 66ee6ee..1e4731f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// <summary>
/// Add an instance of this to your <see cref="Document"/> to add
- /// a facet label associated with an arbitrary <see cref="byte[]"/>.
+ /// a facet label associated with an arbitrary <see cref="T:byte[]"/>.
/// This will require a custom <see cref="Facets"/>
/// implementation at search time; see <see cref="IntAssociationFacetField"/>
/// and <see cref="FloatAssociationFacetField"/> to use existing
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index 31ca1a5..a63e5da 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// <summary>
/// A per-segment cache of documents' facet ordinals. Every
- /// <see cref="CachedOrds"/> holds the ordinals in a raw <see cref="int[]"/>,
+ /// <see cref="CachedOrds"/> holds the ordinals in a raw <see cref="T:int[]"/>,
/// and therefore consumes as much RAM as the total
/// number of ordinals found in the segment, but saves the
/// CPU cost of decoding ordinals during facet counting.
@@ -123,7 +123,7 @@ namespace Lucene.Net.Facet.Taxonomy
}
/// <summary>
- /// Holds the cached ordinals in two parallel <see cref="int[]"/> arrays.
+ /// Holds the cached ordinals in two parallel <see cref="T:int[]"/> arrays.
/// </summary>
public sealed class CachedOrds : IAccountable
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 4810804..eca4372 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -173,7 +173,7 @@ namespace Lucene.Net.Facet.Taxonomy
}
/// <summary>
- /// Copies the path components to the given <see cref="char[]"/>, starting at index
+ /// Copies the path components to the given <see cref="T:char[]"/>, starting at index
/// <paramref name="start"/>. <paramref name="delimiter"/> is copied between the path components.
/// Returns the number of chars copied.
///
@@ -287,7 +287,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// Returns a string representation of the path, separating components with
/// '/'.
/// </summary>
- /// <see cref= #toString(char) </seealso>
+ /// <seealso cref="ToString(char)"/>
public override string ToString()
{
return ToString('/');
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index ee2eacd..2e71b8b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -36,7 +36,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// <summary>
/// A <see cref="TaxonomyReader"/> which retrieves stored taxonomy information from a
/// <see cref="Directory"/>.
- /// <P>
+ /// <para/>
/// Reading from the on-disk index on every method call is too slow, so this
/// implementation employs caching: Some methods cache recent requests and their
/// results, while other methods prefetch all the data into memory and then
@@ -156,7 +156,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// any issues, unless the two instances continue to live. The reader
/// guarantees that the two instances cannot affect each other in terms of
/// correctness of the caches, however if the size of the cache is changed
- /// through <see cref="CacheSize"/>, it will affect both reader instances.
+ /// through <see cref="SetCacheSize(int)"/>, it will affect both reader instances.
/// </para>
/// </summary>
protected override TaxonomyReader DoOpenIfChanged()
@@ -378,11 +378,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// <summary>
/// <see cref="SetCacheSize"/> controls the maximum allowed size of each of the caches
/// used by <see cref="GetPath(int)"/> and <see cref="GetOrdinal(FacetLabel)"/>.
- /// <P>
+ /// <para/>
/// Currently, if the given size is smaller than the current size of
/// a cache, it will not shrink, and rather we be limited to its current
/// size. </summary>
- /// <param name="value"> the new maximum cache size, in number of entries. </param>
+ /// <param name="size"> the new maximum cache size, in number of entries. </param>
public virtual void SetCacheSize(int size)
{
EnsureOpen();
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index a0e4c64..44bccb8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -132,11 +132,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// <summary>
/// Forcibly unlocks the taxonomy in the named directory.
- /// <P>
+ /// <para/>
/// Caution: this should only be used by failure recovery code, when it is
/// known that no other process nor thread is in fact currently accessing
/// this taxonomy.
- /// <P>
+ /// <para/>
/// This method is unnecessary if your <see cref="Store.Directory"/> uses a
/// <see cref="NativeFSLockFactory"/> instead of the default
/// <see cref="SimpleFSLockFactory"/>. When the "native" lock is used, a lock
@@ -250,16 +250,16 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// <summary>
/// Open internal index writer, which contains the taxonomy data.
- /// <para>
+ /// <para/>
/// Extensions may provide their own <see cref="IndexWriter"/> implementation or instance.
- /// <br><b>NOTE:</b> the instance this method returns will be disposed upon calling
+ /// <para/>
+ /// <b>NOTE:</b> the instance this method returns will be disposed upon calling
/// to <see cref="Dispose()"/>.
- /// <br><b>NOTE:</b> the merge policy in effect must not merge none adjacent segments. See
- /// comment in <see cref="CreateIndexWriterConfig(IndexWriterConfig.OpenMode)"/> for the logic behind this.
- ///
- /// </para>
+ /// <para/>
+ /// <b>NOTE:</b> the merge policy in effect must not merge non-adjacent segments. See
+ /// comment in <see cref="CreateIndexWriterConfig(OpenMode)"/> for the logic behind this.
/// </summary>
- /// <seealso cref="CreateIndexWriterConfig(IndexWriterConfig.OpenMode)"/>
+ /// <seealso cref="CreateIndexWriterConfig(OpenMode)"/>
/// <param name="directory">
/// the <see cref="Store.Directory"/> on top of which an <see cref="IndexWriter"/>
/// should be opened. </param>
@@ -272,11 +272,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// <summary>
/// Create the <see cref="IndexWriterConfig"/> that would be used for opening the internal index writer.
- /// <br>Extensions can configure the <see cref="IndexWriter"/> as they see fit,
+ /// <para/>
+ /// Extensions can configure the <see cref="IndexWriter"/> as they see fit,
/// including setting a <see cref="Index.MergeScheduler"/>, or
/// <see cref="Index.IndexDeletionPolicy"/>, different RAM size
- /// etc.<br>
- /// <br><b>NOTE:</b> internal docids of the configured index must not be altered.
+ /// etc.
+ /// <para/>
+ /// <b>NOTE:</b> internal docids of the configured index must not be altered.
/// For that, categories are never deleted from the taxonomy index.
/// In addition, merge policy in effect must not merge non-adjacent segments.
/// </summary>
@@ -937,12 +939,12 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// <summary>
/// Mapping from old ordinal to new ordinals, used when merging indexes
/// with separate taxonomies.
- /// <para>
+ /// <para/>
/// <see cref="AddMapping"/> merges one or more taxonomies into the given taxonomy
/// (this). An <see cref="IOrdinalMap"/> is filled for each of the added taxonomies,
/// containing the new ordinal (in the merged taxonomy) of each of the
/// categories in the old taxonomy.
- /// <P>
+ /// <para/>
/// There exist two implementations of <see cref="IOrdinalMap"/>: <see cref="MemoryOrdinalMap"/> and
/// <see cref="DiskOrdinalMap"/>. As their names suggest, the former keeps the map in
/// memory and the latter in a temporary disk file. Because these maps will
@@ -950,14 +952,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
/// same time, it is recommended to put the first taxonomy's map in memory,
/// and all the rest on disk (later to be automatically read into memory one
/// by one, when needed).
- /// </para>
/// </summary>
public interface IOrdinalMap
{
/// <summary>
/// Set the size of the map. This MUST be called before <see cref="AddMapping"/>.
/// It is assumed (but not verified) that <see cref="AddMapping"/> will then be
- /// called exactly 'size' times, with different <paramref name="origOrdinals"/> between 0
+ /// called exactly 'size' times, with different <c>origOrdinals</c> between 0
/// and size - 1.
/// </summary>
void SetSize(int taxonomySize);
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
index b48eafd..cc8137c 100644
--- a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
@@ -89,7 +89,7 @@
}
/// <summary>
- /// Subclass & override if you change the encoding.
+ /// Subclass & override if you change the encoding.
/// </summary>
protected virtual void Decode(BytesRef buf, IntsRef ordinals)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index 4304646..608300b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// <summary>
/// Base class for all taxonomy-based facets that aggregate
- /// to a per-ords <see cref="float[]"/>.
+ /// to a per-ords <see cref="T:float[]"/>.
/// </summary>
public abstract class FloatTaxonomyFacets : TaxonomyFacets
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index fc279c5..2b223e6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// <summary>
/// Base class for all taxonomy-based facets that aggregate
- /// to a per-ords <see cref="int[]"/>.
+ /// to a per-ords <see cref="T:int[]"/>.
/// </summary>
public abstract class IntTaxonomyFacets : TaxonomyFacets
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index 956922f..5034030 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -23,15 +23,15 @@ namespace Lucene.Net.Facet.Taxonomy
*/
/// <summary>
- /// <see cref="LRUHashMap{TKey, TValue}"/> is similar to of Java's HashMap, which has a bounded <see cref="Capacity"/>;
- /// When it reaches that <see cref="Capacity"/>, each time a new element is added, the least
+ /// <see cref="LRUHashMap{TKey, TValue}"/> is similar to Java's HashMap, which has a bounded <see cref="Limit"/>;
+ /// When it reaches that <see cref="Limit"/>, each time a new element is added, the least
/// recently used (LRU) entry is removed.
/// <para>
/// Unlike the Java Lucene implementation, this one is thread safe because it is backed by the <see cref="LurchTable{TKey, TValue}"/>.
/// Do note that every time an element is read from <see cref="LRUHashMap{TKey, TValue}"/>,
/// a write operation also takes place to update the element's last access time.
/// This is because the LRU order needs to be remembered to determine which element
- /// to evict when the <see cref="Capacity"/> is exceeded.
+ /// to evict when the <see cref="Limit"/> is exceeded.
/// </para>
/// <para>
///
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index 8fb75d8..24ca87b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -96,7 +96,7 @@ namespace Lucene.Net.Facet.Taxonomy
///
/// <para>
/// <b>NOTE:</b> you should only use this constructor if you commit and call
- /// <see cref="Index.ReaderManager.MaybeRefresh()"/> in the same thread. Otherwise it could lead to an
+ /// <see cref="Search.ReferenceManager{G}.MaybeRefresh()"/> (on the <see cref="Index.ReaderManager"/>) in the same thread. Otherwise it could lead to an
/// unsync'd <see cref="IndexSearcher"/> and <see cref="TaxonomyReader"/> pair.
/// </para>
/// </summary>
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index b777f52..eee089d 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -34,7 +34,7 @@ namespace Lucene.Net.Facet.Taxonomy
using Weight = Lucene.Net.Search.Weight;
/// <summary>
- /// Aggregates sum of values from <see cref="FunctionValues.DoubleVal"/>,
+ /// Aggregates sum of values from <see cref="FunctionValues.DoubleVal(int)"/> and <see cref="FunctionValues.DoubleVal(int, double[])"/>,
/// for each facet label.
///
/// @lucene.experimental
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index 4cf132d..b02d5f2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -156,11 +156,11 @@ namespace Lucene.Net.Facet.Taxonomy
/// Implements the actual opening of a new <see cref="TaxonomyReader"/> instance if
/// the taxonomy has changed.
/// </summary>
- /// <see cref= #openIfChanged(TaxonomyReader) </seealso>
+ /// <seealso cref="OpenIfChanged{T}(T)"/>
protected abstract TaxonomyReader DoOpenIfChanged();
/// <summary>
- /// Throws <see cref="AlreadyClosedException"/> if this <see cref="IndexReader"/> is disposed
+ /// Throws <see cref="AlreadyClosedException"/> if this <see cref="Index.IndexReader"/> is disposed
/// </summary>
protected void EnsureOpen()
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index 29e1ce8..8988b14 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Facet.Taxonomy
/// <see cref="AddCategory"/> adds a category with a given path name to the taxonomy,
/// and returns its ordinal. If the category was already present in
/// the taxonomy, its existing ordinal is returned.
- /// <P>
+ /// <para/>
/// Before adding a category, <see cref="AddCategory"/> makes sure that all its
/// ancestor categories exist in the taxonomy as well. As result, the
/// ordinal of a category is guaranteed to be smaller then the ordinal of
@@ -100,10 +100,10 @@ namespace Lucene.Net.Facet.Taxonomy
/// <summary>
/// <see cref="Count"/> returns the number of categories in the taxonomy.
- /// <P>
+ /// <para/>
/// Because categories are numbered consecutively starting with 0, it
/// means the taxonomy contains ordinals 0 through <see cref="Count"/>-1.
- /// <P>
+ /// <para/>
/// Note that the number returned by <see cref="Count"/> is often slightly higher
/// than the number of categories inserted into the taxonomy; This is
/// because when a category is added to the taxonomy, its ancestors
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 01f712a..0387063 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -31,13 +31,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
/// </para>
/// <para>
/// This data structure grows by adding a new HashArray whenever the number of
- /// collisions in the <see cref="CollisionMap"/> exceeds <see cref="loadFactor"/> *
- /// <see cref="GetMaxOrdinal()"/>. Growing also includes reinserting all colliding
+ /// collisions in the <see cref="CollisionMap"/> exceeds <see cref="loadFactor"/>
+ /// <c>GetMaxOrdinal().</c> Growing also includes reinserting all colliding
/// labels into the <see cref="HashArray"/>s to possibly reduce the number of collisions.
///
/// For setting the <see cref="loadFactor"/> see
/// <see cref="CompactLabelToOrdinal(int, float, int)"/>.
- ///
/// </para>
/// <para>
/// This data structure has a much lower memory footprint (~30%) compared to a
@@ -396,12 +395,10 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
/// </summary>
internal static CompactLabelToOrdinal Open(FileInfo file, float loadFactor, int numHashArrays)
{
- /// <summary>
- /// Part of the file is the labelRepository, which needs to be rehashed
- /// and label offsets re-added to the object. I am unsure as to why we
- /// can't just store these off in the file as well, but in keeping with
- /// the spirit of the original code, I did it this way. (ssuppe)
- /// </summary>
+ // Part of the file is the labelRepository, which needs to be rehashed
+ // and label offsets re-added to the object. I am unsure as to why we
+ // can't just store these off in the file as well, but in keeping with
+ // the spirit of the original code, I did it this way. (ssuppe)
CompactLabelToOrdinal l2o = new CompactLabelToOrdinal();
l2o.loadFactor = loadFactor;
l2o.hashArrays = new HashArray[numHashArrays];
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/638f2a11/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index 127e752..1f7e712 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -92,7 +92,7 @@
bool IsFull { get; }
/// <summary>
- /// Clears the content of the cache. Unlike <see cref="Dispose()"/>, the caller can
+ /// Clears the content of the cache. Unlike <see cref="IDisposable.Dispose()"/>, the caller can
/// assume that the cache is still operable after this method returns.
/// </summary>
void Clear();