Posted to commits@lucenenet.apache.org by sy...@apache.org on 2016/10/04 20:01:31 UTC

[01/46] lucenenet git commit: Fixed Facet.Taxonomy.LRUHashMap implementation to correctly remove the eldest item from the cache when an item is added (test passing).

Repository: lucenenet
Updated Branches:
  refs/heads/master 87d05125b -> ddfb46c10


Fixed Facet.Taxonomy.LRUHashMap implementation to correctly remove the eldest item from the cache when an item is added (test passing).
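
The test referenced above exercises the eviction behavior through the Put/Get/Size API shown in the diffs below. As a quick orientation, here is a usage sketch (not taken from the commit; the keys beyond "one" and the expected results are illustrative) of the behavior the fix is meant to guarantee: once the map holds MaxSize entries, adding another entry removes the least recently accessed one, and a Get refreshes an entry's access time.

    // Usage sketch, assuming the post-fix LRUHashMap<TKey, TValue> shown below.
    var cache = new LRUHashMap<string, string>(3);
    cache.Put("one", "Hello world");
    cache.Put("two", "Hello world");
    cache.Put("three", "Hello world");

    cache.Get("one");                 // refreshes "one"; "two" is now the eldest entry
    cache.Put("four", "Hello world"); // count would exceed MaxSize, so the eldest is removed

    bool evicted = !cache.ContainsKey("two"); // expected: true ("two" was least recently used)
    int count = cache.Size();                 // expected: 3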


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1ca08dfb
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1ca08dfb
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1ca08dfb

Branch: refs/heads/master
Commit: 1ca08dfb1609699219da6922138a523b007dff3b
Parents: 87d0512
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Fri Sep 23 21:07:16 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:30 2016 +0700

----------------------------------------------------------------------
 .../Directory/DirectoryTaxonomyReader.cs        |  35 ++--
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs     | 162 ++++++++++++-------
 .../Taxonomy/TestLRUHashMap.cs                  |   2 +-
 3 files changed, 118 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ca08dfb/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index a567210..da82cbf 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -353,24 +353,21 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             // TODO: can we use an int-based hash impl, such as IntToObjectMap,
             // wrapped as LRU?
-            int catIDInteger = Convert.ToInt32(ordinal);
-            lock (categoryCache)
+
+            // LUCENENET NOTE: We don't need to convert int to int here.
+            // Also, our cache implementation is thread safe, so we can nix the
+            // locks.
+            FacetLabel res;
+            if (categoryCache.TryGetValue(ordinal, out res))
             {
-                var res = categoryCache.Get(catIDInteger,false);
-                if (res != null)
-                {
-                    return res;
-                }
+                return res;
             }
 
             Document doc = indexReader.Document(ordinal);
-            FacetLabel ret = new FacetLabel(FacetsConfig.StringToPath(doc.Get(Consts.FULL)));
-            lock (categoryCache)
-            {
-                categoryCache.Put(catIDInteger, ret);
-            }
+            res = new FacetLabel(FacetsConfig.StringToPath(doc.Get(Consts.FULL)));
+            categoryCache.Put(ordinal, res);
 
-            return ret;
+            return res;
         }
 
         public override int Size
@@ -395,14 +392,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             set
             {
                 EnsureOpen();
-                lock (categoryCache)
-                {
-                    categoryCache.MaxSize = value;
-                }
-                lock (ordinalCache)
-                {
-                    ordinalCache.MaxSize = value;
-                }
+                // LUCENENET NOTE: No locking required here,
+                // since our LRU implementation is thread-safe
+                categoryCache.MaxSize = value;
+                ordinalCache.MaxSize = value;
             }
         }
 

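The LUCENENET NOTEs in this hunk depend on LRUHashMap synchronizing internally: in the rewritten LRUHashMap.cs below, Put, Get and TryGetValue all take a private syncLock (a plain lock rather than a ReaderWriterLockSlim, because every read also writes the access timestamp). The sketch that follows is not part of the commit; it only illustrates why callers such as DirectoryTaxonomyReader no longer need their own lock blocks, since the map can be shared across threads as-is. The key range and values are made up.

    // Concurrency sketch (illustrative only): many threads read and populate the
    // same LRUHashMap without any external locking.
    using System.Threading.Tasks;

    var shared = new LRUHashMap<int, string>(100);
    Parallel.For(0, 1000, i =>
    {
        string cached;
        if (!shared.TryGetValue(i % 50, out cached))
        {
            shared.Put(i % 50, "category-" + (i % 50));
        }
    });
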
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ca08dfb/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index d442992..5d51036 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -1,13 +1,9 @@
 \ufeffusing System;
-using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.Linq;
-using System.Threading;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -25,7 +21,6 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
     /// <summary>
     /// LRUHashMap is an extension of Java's HashMap, which has a bounded size();
     /// When it reaches that size, each time a new element is added, the least
@@ -58,97 +53,146 @@ namespace Lucene.Net.Facet.Taxonomy
     /// @lucene.experimental
     /// </para>
     /// </summary>
-    public class LRUHashMap<TV, TU> where TU : class //this is implementation of LRU Cache
+    public class LRUHashMap<TKey, TValue> where TValue : class //this is implementation of LRU Cache
     {
-
-        public int MaxSize { get; set; }
-        private int CleanSize;
-        private TimeSpan MaxDuration;
-
-
-        private readonly ConcurrentDictionary<TV, CacheDataObject<TU>> _cache = new ConcurrentDictionary<TV, CacheDataObject<TU>>();
-
-        public LRUHashMap(int maxSize = 50000, int cleanPercentage = 30, TimeSpan maxDuration = default(TimeSpan))
+        private readonly Dictionary<TKey, CacheDataObject> cache;
+        // We can't use a ReaderWriterLockSlim because every read is also a 
+        // write, so we gain nothing by doing so
+        private readonly object syncLock = new object();
+        // Record last access so we can tie break if 2 calls make it in within
+        // the same millisecond.
+        private long lastAccess;
+        private int maxSize;
+
+        public LRUHashMap(int maxSize)
         {
-            MaxSize = maxSize;
-            CleanSize = (int)Math.Max(MaxSize * (1.0 * cleanPercentage / 100), 1);
-            if (maxDuration == default(TimeSpan))
+            if (maxSize < 1)
             {
-                MaxDuration = TimeSpan.FromDays(1);
+                throw new ArgumentOutOfRangeException("maxSize must be at least 1");
             }
-            else
+            this.maxSize = maxSize;
+            this.cache = new Dictionary<TKey, CacheDataObject>(maxSize);
+        }
+
+        public virtual int MaxSize
+        {
+            get { return maxSize; }
+            set
             {
-                MaxDuration = maxDuration;
+                if (value < 1)
+                {
+                    throw new ArgumentOutOfRangeException("MaxSize must be at least 1");
+                }
+                maxSize = value;
             }
         }
 
-        
-        public bool Put(TV cacheKey, TU value)
+        public bool Put(TKey key, TValue value)
         {
-            return AddToCache(cacheKey, value);
+            lock (syncLock)
+            { 
+                CacheDataObject cdo;
+                if (cache.TryGetValue(key, out cdo))
+                {
+                    // Item already exists, update our last access time
+                    cdo.Timestamp = GetTimestamp();
+                }
+                else
+                {
+                    cache[key] = new CacheDataObject
+                    {
+                        Value = value,
+                        Timestamp = GetTimestamp()
+                    };
+                    // We have added a new item, so we may need to remove the eldest
+                    if (cache.Count > MaxSize)
+                    {
+                        // Remove the eldest item (lowest timestamp) from the cache
+                        cache.Remove(cache.OrderBy(x => x.Value.Timestamp).First().Key);
+                    }
+                }
+            }
+            return true;
         }
 
-        public bool AddToCache(TV cacheKey, TU value)
+        public TValue Get(TKey key)
         {
-            var cachedResult = new CacheDataObject<TU>
-            {
-                Usage = 1, //value == null ? 1 : value.Usage + 1,
-                Value = value,
-                Timestamp = DateTime.UtcNow
-            };
-
-            _cache.AddOrUpdate(cacheKey, cachedResult, (_, __) => cachedResult);
-            if (_cache.Count > MaxSize)
+            lock (syncLock)
             {
-                foreach (var source in _cache
-                    .OrderByDescending(x => x.Value.Usage)
-                    .ThenBy(x => x.Value.Timestamp)
-                    .Skip(MaxSize - CleanSize))
+                CacheDataObject cdo;
+                if (cache.TryGetValue(key, out cdo))
                 {
-                    if (EqualityComparer<TV>.Default.Equals(source.Key, cacheKey))
-                        continue; // we don't want to remove the one we just added
-                    CacheDataObject<TU> ignored;
-                    _cache.TryRemove(source.Key, out ignored);
+                    // Write our last access time
+                    cdo.Timestamp = GetTimestamp();
+
+                    return cdo.Value;
                 }
             }
-            return true;
+            return null;
         }
 
-        public TU Get(TV cacheKey, bool increment = false)
+        public bool TryGetValue(TKey key, out TValue value)
         {
-            CacheDataObject<TU> value;
-            if (_cache.TryGetValue(cacheKey, out value) && (DateTime.UtcNow - value.Timestamp) <= MaxDuration)
+            lock (syncLock)
             {
-                if (increment)
+                CacheDataObject cdo;
+                if (cache.TryGetValue(key, out cdo))
                 {
-                    Interlocked.Increment(ref value.Usage);
+                    // Write our last access time
+                    cdo.Timestamp = GetTimestamp();
+                    value = cdo.Value;
+
+                    return true;
                 }
-                return value.Value;
+
+                value = null;
+                return false;
             }
-            return null;
         }
 
-        public bool IsExistInCache(TV cacheKey)
+        public bool ContainsKey(TKey key)
         {
-            return (_cache.ContainsKey(cacheKey));
+            return cache.ContainsKey(key);
         }
 
+        // LUCENENET TODO: Rename to Count (.NETify)
         public int Size()
         {
-            return _cache.Count;
+            return cache.Count;
+        }
+
+        private long GetTimestamp()
+        {
+            long ticks = DateTime.UtcNow.Ticks;
+            if (ticks <= lastAccess)
+            {
+                // Tie break by incrementing
+                // when 2 calls happen within the
+                // same millisecond
+                ticks = ++lastAccess;
+            }
+            else
+            {
+                lastAccess = ticks;
+            }
+            return ticks;
         }
+        
 
         #region Nested type: CacheDataObject
 
-        private class CacheDataObject<T> where T : class
+        private class CacheDataObject
         {
-            public DateTime Timestamp;
-            public int Usage;
-            public T Value;
+            // Ticks representing the last access time
+            public long Timestamp;
+            public TValue Value;
+
+            public override string ToString()
+            {
+                return "Last Access: " + Timestamp.ToString() + " - " + Value.ToString();
+            }
         }
 
         #endregion
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1ca08dfb/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
index 71b62c5..c08bca8 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
@@ -30,7 +30,7 @@ namespace Lucene.Net.Facet.Taxonomy
         [Test]
         public virtual void TestLru()
         {
-            LRUHashMap<string, string> lru = new LRUHashMap<string, string>(3,1);
+            LRUHashMap<string, string> lru = new LRUHashMap<string, string>(3);
             Assert.AreEqual(0, lru.Size());
             lru.Put("one", "Hello world");
             Assert.AreEqual(1, lru.Size());


[06/46] lucenenet git commit: Facet.FacetResult & Facet.LabelAndValue: Fixed string format bugs by adding extra constructors to track the underlying data type of return value and formatting accordingly in ToString().

Posted by sy...@apache.org.
Facet.FacetResult & Facet.LabelAndValue: Fixed string format bugs by adding extra constructors to track the underlying data type of return value and formatting accordingly in ToString().
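
The fix keys ToString() formatting off which constructor was used. Here is a short sketch of the resulting behavior for LabelAndValue (the values are illustrative; FacetResult follows the same pattern for its Value): the int constructor records typeof(int) and prints the value with no decimal, while the float constructor prints at least one digit after the decimal, both using the invariant culture.

    // Formatting sketch, assuming the constructors added in this commit.
    using System;
    using Lucene.Net.Facet;

    var asInt   = new LabelAndValue("dim1", 5);   // int overload
    var asFloat = new LabelAndValue("dim1", 5f);  // float overload

    Console.WriteLine(asInt);   // dim1 (5)
    Console.WriteLine(asFloat); // dim1 (5.0)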


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/abf096ef
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/abf096ef
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/abf096ef

Branch: refs/heads/master
Commit: abf096ef554a2e3a5dcba3c83a71681792761816
Parents: 602bbcc
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 17:32:38 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:44 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/FacetResult.cs             | 42 ++++++++++++++++++--
 src/Lucene.Net.Facet/LabelAndValue.cs           | 28 ++++++++++++-
 .../Taxonomy/FloatAssociationFacetField.cs      |  3 +-
 3 files changed, 67 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/abf096ef/src/Lucene.Net.Facet/FacetResult.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetResult.cs b/src/Lucene.Net.Facet/FacetResult.cs
index be619b2..924f8d5 100644
--- a/src/Lucene.Net.Facet/FacetResult.cs
+++ b/src/Lucene.Net.Facet/FacetResult.cs
@@ -1,6 +1,8 @@
 \ufeffusing System.Text;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
+using System.Globalization;
+using System;
 
 namespace Lucene.Net.Facet
 {
@@ -51,12 +53,39 @@ namespace Lucene.Net.Facet
         public readonly LabelAndValue[] LabelValues;
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// The original data type of <see cref="Value"/> that was passed through the constructor.
+        /// </summary>
+        public readonly Type typeOfValue;
+
+        /// <summary>
+        /// Constructor for <see cref="float"/> <paramref name="value"/>. Makes the <see cref="ToString()"/> method 
+        /// print the <paramref name="value"/> as a <see cref="float"/> with at least 1 number after the decimal.
+        /// </summary>
         public FacetResult(string dim, string[] path, float value, LabelAndValue[] labelValues, int childCount)
+            : this(dim, path, labelValues, childCount)
+        {
+            this.Value = value;
+            this.typeOfValue = typeof(float);
+        }
+
+        /// <summary>
+        /// Constructor for <see cref="int"/> <paramref name="value"/>. Makes the <see cref="ToString()"/> method 
+        /// print the <paramref name="value"/> as an <see cref="int"/> with no decimal.
+        /// </summary>
+        public FacetResult(string dim, string[] path, int value, LabelAndValue[] labelValues, int childCount)
+            : this(dim, path, labelValues, childCount)
+        {
+            this.Value = value;
+            this.typeOfValue = typeof(int);
+        }
+
+        /// <summary>
+        /// Private constructor for shared parameters to be called by public constructors.
+        /// </summary>
+        private FacetResult(string dim, string[] path, LabelAndValue[] labelValues, int childCount)
         {
             this.Dim = dim;
             this.Path = path;
-            this.Value = value;
             this.LabelValues = labelValues;
             this.ChildCount = childCount;
         }
@@ -69,7 +98,14 @@ namespace Lucene.Net.Facet
             sb.Append(" path=");
             sb.Append("[" + Arrays.ToString(Path) + "]");
             sb.Append(" value=");
-            sb.Append(Value);
+            if (typeOfValue == typeof(int))
+            {
+                sb.AppendFormat(CultureInfo.InvariantCulture, "{0:0}", Value); // No formatting (looks like int)
+            }
+            else
+            {
+                sb.AppendFormat(CultureInfo.InvariantCulture, "{0:0.0#####}", Value); // Decimal formatting
+            }
             sb.Append(" childCount=");
             sb.Append(ChildCount);
             sb.Append('\n');

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/abf096ef/src/Lucene.Net.Facet/LabelAndValue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/LabelAndValue.cs b/src/Lucene.Net.Facet/LabelAndValue.cs
index 0cbcddc..6cf1991 100644
--- a/src/Lucene.Net.Facet/LabelAndValue.cs
+++ b/src/Lucene.Net.Facet/LabelAndValue.cs
@@ -1,4 +1,6 @@
 \ufeffusing Lucene.Net.Support;
+using System;
+using System.Globalization;
 
 namespace Lucene.Net.Facet
 {
@@ -35,16 +37,38 @@ namespace Lucene.Net.Facet
         public readonly float value;
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// The original data type of <see cref="value"/> that was passed through the constructor.
+        /// </summary>
+        public readonly Type typeOfValue;
+
+        /// <summary>
+        /// Constructor for <see cref="float"/> <paramref name="value"/>. Makes the <see cref="ToString()"/> method 
+        /// print the <paramref name="value"/> as a <see cref="float"/> with at least 1 number after the decimal.
+        /// </summary>
         public LabelAndValue(string label, float value)
         {
             this.label = label;
             this.value = value;
+            this.typeOfValue = typeof(float);
+        }
+
+        /// <summary>
+        /// Constructor for <see cref="int"/> <paramref name="value"/>. Makes the <see cref="ToString()"/> method 
+        /// print the <paramref name="value"/> as an <see cref="int"/> with no decimal.
+        /// </summary>
+        public LabelAndValue(string label, int value)
+        {
+            this.label = label;
+            this.value = value;
+            this.typeOfValue = typeof(int);
         }
 
         public override string ToString()
         {
-            return label + " (" + value + ")";
+            string valueString = (typeOfValue == typeof(int)) 
+                ? value.ToString("0", CultureInfo.InvariantCulture) 
+                : value.ToString("0.0#####", CultureInfo.InvariantCulture);
+            return label + " (" + valueString + ")";
         }
 
         public override bool Equals(object _other)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/abf096ef/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index cc90e61..54e8ef2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -1,4 +1,5 @@
 \ufeffusing Lucene.Net.Support;
+using System.Globalization;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
@@ -58,7 +59,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
 	  public override string ToString()
 	  {
-		return "FloatAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + bytesRefToFloat(assoc) + ")";
+		return "FloatAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + bytesRefToFloat(assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
 	  }
 	}
 


[18/46] lucenenet git commit: .NETify Facet: Field names should be camelCase. Fields should not be public (changed to properties with PascalCase names).

Posted by sy...@apache.org.
.NETify Facet: Field names should be camelCase. Fields should not be public (changed to properties with PascalCase names).
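
In practice the change follows one mechanical pattern throughout the files listed below, illustrated here with FacetField's Dim (taken from the diff): a public readonly field with a Java-style lowercase name becomes a PascalCase property with a private setter, and call sites are updated to match.

    // Before: Java-style public field
    public readonly string dim;

    // After: .NET-style property (still assigned only in the constructor)
    public string Dim { get; private set; }

    // Call sites change accordingly, e.g.:
    //   facetField.dim  ->  facetField.Dim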


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e0a73b45
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e0a73b45
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e0a73b45

Branch: refs/heads/master
Commit: e0a73b45dd4a0faf6b128a271971a9d787b85cc3
Parents: 9604c0f
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 10:33:03 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:13 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/DrillSideways.cs           |  4 +-
 src/Lucene.Net.Facet/DrillSidewaysScorer.cs     |  4 +-
 src/Lucene.Net.Facet/FacetField.cs              | 10 +--
 src/Lucene.Net.Facet/FacetResult.cs             | 18 ++--
 src/Lucene.Net.Facet/FacetsCollector.cs         | 16 ++--
 src/Lucene.Net.Facet/FacetsConfig.cs            | 39 ++++-----
 src/Lucene.Net.Facet/LabelAndValue.cs           | 32 +++----
 .../RandomSamplingFacetsCollector.cs            | 14 ++--
 src/Lucene.Net.Facet/Range/DoubleRange.cs       |  8 +-
 .../Range/DoubleRangeFacetCounts.cs             | 16 ++--
 src/Lucene.Net.Facet/Range/LongRange.cs         | 16 ++--
 .../Range/LongRangeFacetCounts.cs               | 16 ++--
 src/Lucene.Net.Facet/Range/Range.cs             |  2 +-
 src/Lucene.Net.Facet/Range/RangeFacetCounts.cs  | 30 +++----
 .../SortedSet/SortedSetDocValuesFacetCounts.cs  | 10 +--
 .../SortedSet/SortedSetDocValuesFacetField.cs   |  4 +-
 .../SortedSet/SortedSetDocValuesReaderState.cs  |  4 +-
 .../Taxonomy/AssociationFacetField.cs           | 14 ++--
 .../Taxonomy/CachedOrdinalsReader.cs            | 28 +++----
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs   | 88 ++++++++++----------
 src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs     |  4 +-
 .../Taxonomy/FastTaxonomyFacetCounts.cs         |  4 +-
 .../Taxonomy/FloatAssociationFacetField.cs      |  4 +-
 .../Taxonomy/FloatTaxonomyFacets.cs             | 30 +++----
 .../Taxonomy/IntAssociationFacetField.cs        |  2 +-
 .../Taxonomy/IntTaxonomyFacets.cs               | 22 ++---
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs     | 22 ++---
 .../Taxonomy/SearcherTaxonomyManager.cs         | 30 +++----
 .../Taxonomy/TaxonomyFacetCounts.cs             |  4 +-
 .../TaxonomyFacetSumFloatAssociations.cs        |  4 +-
 .../Taxonomy/TaxonomyFacetSumIntAssociations.cs |  4 +-
 .../Taxonomy/TaxonomyFacetSumValueSource.cs     |  8 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs | 36 ++++----
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  8 +-
 .../Taxonomy/WriterCache/CollisionMap.cs        | 30 +++----
 src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs     | 10 +--
 src/Lucene.Net.Facet/TopOrdAndIntQueue.cs       |  4 +-
 src/Lucene.Net.Tests.Facet/FacetTestCase.cs     | 18 ++--
 .../Range/TestRangeFacetCounts.cs               | 24 +++---
 .../Directory/TestConcurrentFacetedIndexing.cs  |  2 +-
 .../Taxonomy/TestSearcherTaxonomyManager.cs     | 16 ++--
 .../Taxonomy/TestTaxonomyFacetCounts.cs         |  8 +-
 .../Taxonomy/TestTaxonomyFacetCounts2.cs        | 34 ++++----
 src/Lucene.Net.Tests.Facet/TestDrillSideways.cs |  8 +-
 src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs  |  2 +-
 .../TestRandomSamplingFacetsCollector.cs        |  6 +-
 46 files changed, 359 insertions(+), 358 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/DrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 2c71be4..457bed3 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -272,11 +272,11 @@ namespace Lucene.Net.Facet
         {
             /// <summary>
             /// Combined drill down & sideways results. </summary>
-            public readonly Facets Facets;
+            public Facets Facets { get; private set; }
 
             /// <summary>
             /// Hits. </summary>
-            public readonly TopDocs Hits;
+            public TopDocs Hits { get; private set; }
 
             /// <summary>
             /// Sole constructor. </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
index bd045c3..13d51e6 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
@@ -723,8 +723,8 @@ namespace Lucene.Net.Facet
         {
             private readonly DrillSidewaysScorer outerInstance;
 
-            internal float score_Renamed;
-            internal int doc;
+            //internal float score_Renamed; // not used
+            //internal int doc; // not used
 
             public FakeScorer(DrillSidewaysScorer outerInstance)
                 : base(null)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/FacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetField.cs b/src/Lucene.Net.Facet/FacetField.cs
index 5c85936..794ec31 100644
--- a/src/Lucene.Net.Facet/FacetField.cs
+++ b/src/Lucene.Net.Facet/FacetField.cs
@@ -42,11 +42,11 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Dimension for this field. </summary>
-        public readonly string dim;
+        public string Dim { get; private set; }
 
         /// <summary>
         /// Path for this field. </summary>
-        public readonly string[] path;
+        public string[] Path { get; private set; }
 
         /// <summary>
         /// Creates the this from {@code dim} and
@@ -60,17 +60,17 @@ namespace Lucene.Net.Facet
             {
                 VerifyLabel(label);
             }
-            this.dim = dim;
+            this.Dim = dim;
             if (path.Length == 0)
             {
                 throw new System.ArgumentException("path must have at least one element");
             }
-            this.path = path;
+            this.Path = path;
         }
 
         public override string ToString()
         {
-            return "FacetField(dim=" + dim + " path=[" + Arrays.ToString(path) + "])";
+            return "FacetField(dim=" + Dim + " path=[" + Arrays.ToString(Path) + "])";
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/FacetResult.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetResult.cs b/src/Lucene.Net.Facet/FacetResult.cs
index 5699e6c..7dc73b9 100644
--- a/src/Lucene.Net.Facet/FacetResult.cs
+++ b/src/Lucene.Net.Facet/FacetResult.cs
@@ -28,31 +28,31 @@ namespace Lucene.Net.Facet
     {
         /// <summary>
         /// Dimension that was requested. </summary>
-        public readonly string Dim;
+        public string Dim { get; private set; }
 
         /// <summary>
         /// Path whose children were requested. </summary>
-        public readonly string[] Path;
+        public string[] Path { get; private set; }
 
         /// <summary>
         /// Total value for this path (sum of all child counts, or
         ///  sum of all child values), even those not included in
         ///  the topN. 
         /// </summary>
-        public readonly float Value;
+        public float Value { get; private set; }
 
         /// <summary>
         /// How many child labels were encountered. </summary>
-        public readonly int ChildCount;
+        public int ChildCount { get; private set; }
 
         /// <summary>
         /// Child counts. </summary>
-        public readonly LabelAndValue[] LabelValues;
+        public LabelAndValue[] LabelValues { get; private set; }
 
         /// <summary>
         /// The original data type of <see cref="Value"/> that was passed through the constructor.
         /// </summary>
-        public readonly Type typeOfValue;
+        public Type TypeOfValue { get; private set; }
 
         /// <summary>
         /// Constructor for <see cref="float"/> <paramref name="value"/>. Makes the <see cref="ToString()"/> method 
@@ -62,7 +62,7 @@ namespace Lucene.Net.Facet
             : this(dim, path, labelValues, childCount)
         {
             this.Value = value;
-            this.typeOfValue = typeof(float);
+            this.TypeOfValue = typeof(float);
         }
 
         /// <summary>
@@ -73,7 +73,7 @@ namespace Lucene.Net.Facet
             : this(dim, path, labelValues, childCount)
         {
             this.Value = value;
-            this.typeOfValue = typeof(int);
+            this.TypeOfValue = typeof(int);
         }
 
         /// <summary>
@@ -95,7 +95,7 @@ namespace Lucene.Net.Facet
             sb.Append(" path=");
             sb.Append("[" + Arrays.ToString(Path) + "]");
             sb.Append(" value=");
-            if (typeOfValue == typeof(int))
+            if (TypeOfValue == typeof(int))
             {
                 sb.AppendFormat(CultureInfo.InvariantCulture, "{0:0}", Value); // No formatting (looks like int)
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/FacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index 0739367..d7be2f0 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -72,28 +72,28 @@ namespace Lucene.Net.Facet
 
             /// <summary>
             /// Context for this segment. </summary>
-            public readonly AtomicReaderContext context;
+            public AtomicReaderContext Context { get; private set; }
 
             /// <summary>
             /// Which documents were seen. </summary>
-            public readonly DocIdSet bits;
+            public DocIdSet Bits { get; private set; }
 
             /// <summary>
             /// Non-sparse scores array. </summary>
-            public readonly float[] scores;
+            public float[] Scores { get; private set; }
 
             /// <summary>
             /// Total number of hits </summary>
-            public readonly int totalHits;
+            public int TotalHits { get; private set; }
 
             /// <summary>
             /// Sole constructor. </summary>
             public MatchingDocs(AtomicReaderContext context, DocIdSet bits, int totalHits, float[] scores)
             {
-                this.context = context;
-                this.bits = bits;
-                this.scores = scores;
-                this.totalHits = totalHits;
+                this.Context = context;
+                this.Bits = bits;
+                this.Scores = scores;
+                this.TotalHits = totalHits;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 16a0ea3..48e97a1 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -80,28 +80,29 @@ namespace Lucene.Net.Facet
         {
             /// <summary>
             /// True if this dimension is hierarchical. </summary>
-            public bool Hierarchical;
+            public bool Hierarchical { get; set; }
 
             /// <summary>
             /// True if this dimension is multi-valued. </summary>
-            public bool MultiValued;
+            public bool MultiValued { get; set; }
 
             /// <summary>
             /// True if the count/aggregate for the entire dimension
             ///  is required, which is unusual (default is false). 
             /// </summary>
-            public bool RequireDimCount;
+            public bool RequireDimCount { get; set; }
 
             /// <summary>
             /// Actual field where this dimension's facet labels
             ///  should be indexed 
             /// </summary>
-            public string IndexFieldName = DEFAULT_INDEX_FIELD_NAME;
+            public string IndexFieldName { get; set; }
 
             /// <summary>
             /// Default constructor. </summary>
             public DimConfig()
             {
+                IndexFieldName = DEFAULT_INDEX_FIELD_NAME;
             }
         }
 
@@ -288,10 +289,10 @@ namespace Lucene.Net.Facet
                 if (field.FieldType == FacetField.TYPE)
                 {
                     FacetField facetField = (FacetField)field;
-                    FacetsConfig.DimConfig dimConfig = GetDimConfig(facetField.dim);
+                    FacetsConfig.DimConfig dimConfig = GetDimConfig(facetField.Dim);
                     if (dimConfig.MultiValued == false)
                     {
-                        CheckSeen(seenDims, facetField.dim);
+                        CheckSeen(seenDims, facetField.Dim);
                     }
                     string indexFieldName = dimConfig.IndexFieldName;
                     IList<FacetField> fields;
@@ -324,18 +325,18 @@ namespace Lucene.Net.Facet
                 if (field.FieldType == AssociationFacetField.TYPE)
                 {
                     AssociationFacetField facetField = (AssociationFacetField)field;
-                    FacetsConfig.DimConfig dimConfig = GetDimConfig(facetField.dim);
+                    FacetsConfig.DimConfig dimConfig = GetDimConfig(facetField.Dim);
                     if (dimConfig.MultiValued == false)
                     {
-                        CheckSeen(seenDims, facetField.dim);
+                        CheckSeen(seenDims, facetField.Dim);
                     }
                     if (dimConfig.Hierarchical)
                     {
-                        throw new System.ArgumentException("AssociationFacetField cannot be hierarchical (dim=\"" + facetField.dim + "\")");
+                        throw new System.ArgumentException("AssociationFacetField cannot be hierarchical (dim=\"" + facetField.Dim + "\")");
                     }
                     if (dimConfig.RequireDimCount)
                     {
-                        throw new System.ArgumentException("AssociationFacetField cannot requireDimCount (dim=\"" + facetField.dim + "\")");
+                        throw new System.ArgumentException("AssociationFacetField cannot requireDimCount (dim=\"" + facetField.Dim + "\")");
                     }
 
                     string indexFieldName = dimConfig.IndexFieldName;
@@ -408,13 +409,13 @@ namespace Lucene.Net.Facet
                 foreach (FacetField facetField in ent.Value)
                 {
 
-                    FacetsConfig.DimConfig ft = GetDimConfig(facetField.dim);
-                    if (facetField.path.Length > 1 && ft.Hierarchical == false)
+                    FacetsConfig.DimConfig ft = GetDimConfig(facetField.Dim);
+                    if (facetField.Path.Length > 1 && ft.Hierarchical == false)
                     {
-                        throw new System.ArgumentException("dimension \"" + facetField.dim + "\" is not hierarchical yet has " + facetField.path.Length + " components");
+                        throw new System.ArgumentException("dimension \"" + facetField.Dim + "\" is not hierarchical yet has " + facetField.Path.Length + " components");
                     }
 
-                    FacetLabel cp = new FacetLabel(facetField.dim, facetField.path);
+                    FacetLabel cp = new FacetLabel(facetField.Dim, facetField.Path);
 
                     CheckTaxoWriter(taxoWriter);
                     int ordinal = taxoWriter.AddCategory(cp);
@@ -497,7 +498,7 @@ namespace Lucene.Net.Facet
                 {
                     // NOTE: we don't add parents for associations
                     CheckTaxoWriter(taxoWriter);
-                    FacetLabel label = new FacetLabel(field.dim, field.path);
+                    FacetLabel label = new FacetLabel(field.Dim, field.Path);
                     int ordinal = taxoWriter.AddCategory(label);
                     if (upto + 4 > bytes.Length)
                     {
@@ -508,12 +509,12 @@ namespace Lucene.Net.Facet
                     bytes[upto++] = (byte)(ordinal >> 16);
                     bytes[upto++] = (byte)(ordinal >> 8);
                     bytes[upto++] = (byte)ordinal;
-                    if (upto + field.assoc.Length > bytes.Length)
+                    if (upto + field.Assoc.Length > bytes.Length)
                     {
-                        bytes = ArrayUtil.Grow(bytes, upto + field.assoc.Length);
+                        bytes = ArrayUtil.Grow(bytes, upto + field.Assoc.Length);
                     }
-                    Array.Copy(field.assoc.Bytes, field.assoc.Offset, bytes, upto, field.assoc.Length);
-                    upto += field.assoc.Length;
+                    Array.Copy(field.Assoc.Bytes, field.Assoc.Offset, bytes, upto, field.Assoc.Length);
+                    upto += field.Assoc.Length;
 
                     // Drill down:
                     for (int i = 1; i <= label.Length; i++)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/LabelAndValue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/LabelAndValue.cs b/src/Lucene.Net.Facet/LabelAndValue.cs
index 1503e3e..bf6271c 100644
--- a/src/Lucene.Net.Facet/LabelAndValue.cs
+++ b/src/Lucene.Net.Facet/LabelAndValue.cs
@@ -28,16 +28,16 @@ namespace Lucene.Net.Facet
     {
         /// <summary>
         /// Facet's label. </summary>
-        public readonly string label;
+        public string Label { get; private set; }
 
         /// <summary>
         /// Value associated with this label. </summary>
-        public readonly float value;
+        public float Value { get; private set; }
 
         /// <summary>
-        /// The original data type of <see cref="value"/> that was passed through the constructor.
+        /// The original data type of <see cref="Value"/> that was passed through the constructor.
         /// </summary>
-        public readonly Type typeOfValue;
+        public Type TypeOfValue { get; private set; }
 
         /// <summary>
         /// Constructor for <see cref="float"/> <paramref name="value"/>. Makes the <see cref="ToString()"/> method 
@@ -45,9 +45,9 @@ namespace Lucene.Net.Facet
         /// </summary>
         public LabelAndValue(string label, float value)
         {
-            this.label = label;
-            this.value = value;
-            this.typeOfValue = typeof(float);
+            this.Label = label;
+            this.Value = value;
+            this.TypeOfValue = typeof(float);
         }
 
         /// <summary>
@@ -56,17 +56,17 @@ namespace Lucene.Net.Facet
         /// </summary>
         public LabelAndValue(string label, int value)
         {
-            this.label = label;
-            this.value = value;
-            this.typeOfValue = typeof(int);
+            this.Label = label;
+            this.Value = value;
+            this.TypeOfValue = typeof(int);
         }
 
         public override string ToString()
         {
-            string valueString = (typeOfValue == typeof(int))
-                ? value.ToString("0", CultureInfo.InvariantCulture)
-                : value.ToString("0.0#####", CultureInfo.InvariantCulture);
-            return label + " (" + valueString + ")";
+            string valueString = (TypeOfValue == typeof(int))
+                ? Value.ToString("0", CultureInfo.InvariantCulture)
+                : Value.ToString("0.0#####", CultureInfo.InvariantCulture);
+            return Label + " (" + valueString + ")";
         }
 
         public override bool Equals(object _other)
@@ -76,12 +76,12 @@ namespace Lucene.Net.Facet
                 return false;
             }
             LabelAndValue other = (LabelAndValue)_other;
-            return label.Equals(other.label) && value.Equals(other.value);
+            return Label.Equals(other.Label) && Value.Equals(other.Value);
         }
 
         public override int GetHashCode()
         {
-            return label.GetHashCode() + 1439 * value.GetHashCode();
+            return Label.GetHashCode() + 1439 * Value.GetHashCode();
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
index a662b6a..6c00c40 100644
--- a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
@@ -143,7 +143,7 @@ namespace Lucene.Net.Facet
                     totalHits = 0;
                     foreach (MatchingDocs md in matchingDocs)
                     {
-                        totalHits += md.totalHits;
+                        totalHits += md.TotalHits;
                     }
                 }
 
@@ -187,7 +187,7 @@ namespace Lucene.Net.Facet
         /// Create a sampled of the given hits. </summary>
         private MatchingDocs CreateSample(MatchingDocs docs)
         {
-            int maxdoc = docs.context.Reader.MaxDoc;
+            int maxdoc = docs.Context.Reader.MaxDoc;
 
             // TODO: we could try the WAH8DocIdSet here as well, as the results will be sparse
             FixedBitSet sampleDocs = new FixedBitSet(maxdoc);
@@ -210,7 +210,7 @@ namespace Lucene.Net.Facet
                     limit = binSize;
                     randomIndex = random.NextInt(binSize);
                 }
-                DocIdSetIterator it = docs.bits.GetIterator();
+                DocIdSetIterator it = docs.Bits.GetIterator();
                 for (int doc = it.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.NextDoc())
                 {
                     if (counter == randomIndex)
@@ -250,7 +250,7 @@ namespace Lucene.Net.Facet
                     }
                 }
 
-                return new MatchingDocs(docs.context, sampleDocs, docs.totalHits, null);
+                return new MatchingDocs(docs.Context, sampleDocs, docs.TotalHits, null);
             }
             catch (IOException)
             {
@@ -282,12 +282,12 @@ namespace Lucene.Net.Facet
 
             for (int i = 0; i < res.LabelValues.Length; i++)
             {
-                childPath[res.Path.Length + 1] = res.LabelValues[i].label;
+                childPath[res.Path.Length + 1] = res.LabelValues[i].Label;
                 string fullPath = FacetsConfig.PathToString(childPath, childPath.Length);
                 int max = reader.DocFreq(new Term(dimConfig.IndexFieldName, fullPath));
-                int correctedCount = (int)((double)res.LabelValues[i].value / samplingRate);
+                int correctedCount = (int)((double)res.LabelValues[i].Value / samplingRate);
                 correctedCount = Math.Min(max, correctedCount);
-                fixedLabelValues[i] = new LabelAndValue(res.LabelValues[i].label, correctedCount);
+                fixedLabelValues[i] = new LabelAndValue(res.LabelValues[i].Label, correctedCount);
             }
 
             // cap the total count on the total number of non-deleted documents in the reader

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Range/DoubleRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRange.cs b/src/Lucene.Net.Facet/Range/DoubleRange.cs
index 93f85cb..058ff52 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRange.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRange.cs
@@ -40,19 +40,19 @@ namespace Lucene.Net.Facet.Range
 
         /// <summary>
         /// Minimum. </summary>
-        public readonly double Min;
+        public double Min { get; private set; }
 
         /// <summary>
         /// Maximum. </summary>
-        public readonly double Max;
+        public double Max { get; private set; }
 
         /// <summary>
         /// True if the minimum value is inclusive. </summary>
-        public readonly bool MinInclusive;
+        public bool MinInclusive { get; private set; }
 
         /// <summary>
         /// True if the maximum value is inclusive. </summary>
-        public readonly bool MaxInclusive;
+        public bool MaxInclusive { get; private set; }
 
         private const double EPSILON = 1E-14;
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index 1033a42..52a3ad5 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -85,7 +85,7 @@ namespace Lucene.Net.Facet.Range
         private void Count(ValueSource valueSource, IEnumerable<MatchingDocs> matchingDocs)
         {
 
-            DoubleRange[] ranges = (DoubleRange[])this.Ranges;
+            DoubleRange[] ranges = (DoubleRange[])this.ranges;
 
             LongRange[] longRanges = new LongRange[ranges.Length];
             for (int i = 0; i < ranges.Length; i++)
@@ -99,13 +99,13 @@ namespace Lucene.Net.Facet.Range
             int missingCount = 0;
             foreach (MatchingDocs hits in matchingDocs)
             {
-                FunctionValues fv = valueSource.GetValues(new Dictionary<string, object>(), hits.context);
+                FunctionValues fv = valueSource.GetValues(new Dictionary<string, object>(), hits.Context);
 
-                TotCount += hits.totalHits;
+                totCount += hits.TotalHits;
                 Bits bits;
-                if (FastMatchFilter != null)
+                if (fastMatchFilter != null)
                 {
-                    DocIdSet dis = FastMatchFilter.GetDocIdSet(hits.context, null);
+                    DocIdSet dis = fastMatchFilter.GetDocIdSet(hits.Context, null);
                     if (dis == null)
                     {
                         // No documents match
@@ -122,7 +122,7 @@ namespace Lucene.Net.Facet.Range
                     bits = null;
                 }
 
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 int doc;
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
@@ -144,8 +144,8 @@ namespace Lucene.Net.Facet.Range
                 }
             }
 
-            missingCount += counter.FillCounts(Counts);
-            TotCount -= missingCount;
+            missingCount += counter.FillCounts(counts);
+            totCount -= missingCount;
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Range/LongRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRange.cs b/src/Lucene.Net.Facet/Range/LongRange.cs
index c54456d..eb7671b 100644
--- a/src/Lucene.Net.Facet/Range/LongRange.cs
+++ b/src/Lucene.Net.Facet/Range/LongRange.cs
@@ -39,19 +39,19 @@ namespace Lucene.Net.Facet.Range
 
         /// <summary>
         /// Minimum. </summary>
-        public readonly long min;
+        public long Min { get; private set; }
 
         /// <summary>
         /// Maximum. </summary>
-        public readonly long max;
+        public long Max { get; private set; }
 
         /// <summary>
         /// True if the minimum value is inclusive. </summary>
-        public readonly bool minInclusive;
+        public bool MinInclusive { get; private set; }
 
         /// <summary>
         /// True if the maximum value is inclusive. </summary>
-        public readonly bool maxInclusive;
+        public bool MaxInclusive { get; private set; }
 
         // TODO: can we require fewer args? (same for
         // Double/FloatRange too)
@@ -61,10 +61,10 @@ namespace Lucene.Net.Facet.Range
         public LongRange(string label, long minIn, bool minInclusive, long maxIn, bool maxInclusive)
             : base(label)
         {
-            this.min = minIn;
-            this.max = maxIn;
-            this.minInclusive = minInclusive;
-            this.maxInclusive = maxInclusive;
+            this.Min = minIn;
+            this.Max = maxIn;
+            this.MinInclusive = minInclusive;
+            this.MaxInclusive = maxInclusive;
 
             if (!minInclusive)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 167eb85..d906cf8 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -78,20 +78,20 @@ namespace Lucene.Net.Facet.Range
         private void Count(ValueSource valueSource, IList<MatchingDocs> matchingDocs)
         {
 
-            LongRange[] ranges = (LongRange[])this.Ranges;
+            LongRange[] ranges = (LongRange[])this.ranges;
 
             LongRangeCounter counter = new LongRangeCounter(ranges);
 
             int missingCount = 0;
             foreach (MatchingDocs hits in matchingDocs)
             {
-                FunctionValues fv = valueSource.GetValues(new Dictionary<string, object>(), hits.context);
+                FunctionValues fv = valueSource.GetValues(new Dictionary<string, object>(), hits.Context);
 
-                TotCount += hits.totalHits;
+                totCount += hits.TotalHits;
                 Bits bits;
-                if (FastMatchFilter != null)
+                if (fastMatchFilter != null)
                 {
-                    DocIdSet dis = FastMatchFilter.GetDocIdSet(hits.context, null);
+                    DocIdSet dis = fastMatchFilter.GetDocIdSet(hits.Context, null);
                     if (dis == null)
                     {
                         // No documents match
@@ -108,7 +108,7 @@ namespace Lucene.Net.Facet.Range
                     bits = null;
                 }
 
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                DocIdSetIterator docs = hits.Bits.GetIterator();
                 int doc;
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                 {
@@ -129,12 +129,12 @@ namespace Lucene.Net.Facet.Range
                 }
             }
 
-            int x = counter.FillCounts(Counts);
+            int x = counter.FillCounts(counts);
 
             missingCount += x;
 
             //System.out.println("totCount " + totCount + " missingCount " + counter.missingCount);
-            TotCount -= missingCount;
+            totCount -= missingCount;
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Range/Range.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/Range.cs b/src/Lucene.Net.Facet/Range/Range.cs
index c207031..32ea724 100644
--- a/src/Lucene.Net.Facet/Range/Range.cs
+++ b/src/Lucene.Net.Facet/Range/Range.cs
@@ -30,7 +30,7 @@
 
         /// <summary>
         /// Label that identifies this range. </summary>
-        public readonly string Label;
+        public string Label { get; private set; }
 
         /// <summary>
         /// Sole constructor. </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
index 53a4d26..e051712 100644
--- a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
@@ -30,11 +30,11 @@ namespace Lucene.Net.Facet.Range
     {
         /// <summary>
         /// Ranges passed to constructor. </summary>
-        protected internal readonly Range[] Ranges;
+        protected internal readonly Range[] ranges;
 
         /// <summary>
         /// Counts, initialized in by subclass. </summary>
-        protected internal readonly int[] Counts;
+        protected internal readonly int[] counts;
 
         /// <summary>
         /// Optional: if specified, we first test this Filter to
@@ -42,42 +42,42 @@ namespace Lucene.Net.Facet.Range
         ///  matching ranges.  If this is null, all documents are
         ///  checked. 
         /// </summary>
-        protected internal readonly Filter FastMatchFilter;
+        protected internal readonly Filter fastMatchFilter;
 
         /// <summary>
         /// Our field name. </summary>
-        protected internal readonly string Field;
+        protected internal readonly string field;
 
         /// <summary>
         /// Total number of hits. </summary>
-        protected internal int TotCount;
+        protected internal int totCount;
 
         /// <summary>
         /// Create {@code RangeFacetCounts} </summary>
         protected internal RangeFacetCounts(string field, Range[] ranges, Filter fastMatchFilter)
         {
-            this.Field = field;
-            this.Ranges = ranges;
-            this.FastMatchFilter = fastMatchFilter;
-            Counts = new int[ranges.Length];
+            this.field = field;
+            this.ranges = ranges;
+            this.fastMatchFilter = fastMatchFilter;
+            counts = new int[ranges.Length];
         }
 
         public override FacetResult GetTopChildren(int topN, string dim, params string[] path)
         {
-            if (dim.Equals(Field) == false)
+            if (dim.Equals(field) == false)
             {
-                throw new System.ArgumentException("invalid dim \"" + dim + "\"; should be \"" + Field + "\"");
+                throw new System.ArgumentException("invalid dim \"" + dim + "\"; should be \"" + field + "\"");
             }
             if (path.Length != 0)
             {
                 throw new System.ArgumentException("path.length should be 0");
             }
-            LabelAndValue[] labelValues = new LabelAndValue[Counts.Length];
-            for (int i = 0; i < Counts.Length; i++)
+            LabelAndValue[] labelValues = new LabelAndValue[counts.Length];
+            for (int i = 0; i < counts.Length; i++)
             {
-                labelValues[i] = new LabelAndValue(Ranges[i].Label, Counts[i]);
+                labelValues[i] = new LabelAndValue(ranges[i].Label, counts[i]);
             }
-            return new FacetResult(dim, path, TotCount, labelValues, labelValues.Length);
+            return new FacetResult(dim, path, totCount, labelValues, labelValues.Length);
         }
 
         public override float GetSpecificValue(string dim, params string[] path)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 1039fff..057d274 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -172,13 +172,13 @@ namespace Lucene.Net.Facet.SortedSet
 
             foreach (FacetsCollector.MatchingDocs hits in matchingDocs)
             {
-                var reader = hits.context.AtomicReader;
+                var reader = hits.Context.AtomicReader;
                 //System.out.println("  reader=" + reader);
                 // LUCENE-5090: make sure the provided reader context "matches"
                 // the top-level reader passed to the
                 // SortedSetDocValuesReaderState, else cryptic
                 // AIOOBE can happen:
-                if (!Equals(ReaderUtil.GetTopLevelContext(hits.context).Reader, origReader))
+                if (!Equals(ReaderUtil.GetTopLevelContext(hits.Context).Reader, origReader))
                 {
                     throw new InvalidOperationException("the SortedSetDocValuesReaderState provided to this class does not match the reader being searched; you must create a new SortedSetDocValuesReaderState every time you open a new IndexReader");
                 }
@@ -189,7 +189,7 @@ namespace Lucene.Net.Facet.SortedSet
                     continue;
                 }
 
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 // TODO: yet another option is to count all segs
                 // first, only in seg-ord space, and then do a
@@ -202,11 +202,11 @@ namespace Lucene.Net.Facet.SortedSet
                 // segs)
                 if (ordinalMap != null)
                 {
-                    int segOrd = hits.context.Ord;
+                    int segOrd = hits.Context.Ord;
 
                     int numSegOrds = (int)segValues.ValueCount;
 
-                    if (hits.totalHits < numSegOrds / 10)
+                    if (hits.TotalHits < numSegOrds / 10)
                     {
                         //System.out.println("    remap as-we-go");
                         // Remap every ord to global ord as we iterate:
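
Besides moving MatchingDocs access to the .NET-style property names (Context, Bits, TotalHits), the hunk above keeps the original heuristic: when the number of hits is small relative to the number of segment ordinals, each ordinal is remapped to the global ordinal space as it is counted; otherwise counting happens in segment-ordinal space first and is remapped once at the end. A simplified, self-contained sketch of that decision with hypothetical inputs (jagged per-document ordinal arrays standing in for the doc-values data):

    public static class OrdinalCountingSketch
    {
        // hitsPerDoc: ordinals of each matching document (hypothetical stand-in for doc values).
        // segToGlobal: maps a segment-local ordinal to its global ordinal.
        // globalCounts: per-global-ordinal counts being accumulated.
        public static void Count(int[][] hitsPerDoc, int[] segToGlobal, int[] globalCounts)
        {
            int numSegOrds = segToGlobal.Length;
            int totalHits = hitsPerDoc.Length;

            if (totalHits < numSegOrds / 10)
            {
                // Sparse case: remap every ordinal to global space as we iterate.
                foreach (int[] ords in hitsPerDoc)
                    foreach (int segOrd in ords)
                        globalCounts[segToGlobal[segOrd]]++;
            }
            else
            {
                // Dense case: count in segment-ordinal space first, remap once at the end.
                int[] segCounts = new int[numSegOrds];
                foreach (int[] ords in hitsPerDoc)
                    foreach (int segOrd in ords)
                        segCounts[segOrd]++;

                for (int segOrd = 0; segOrd < numSegOrds; segOrd++)
                    if (segCounts[segOrd] > 0)
                        globalCounts[segToGlobal[segOrd]] += segCounts[segOrd];
            }
        }
    }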

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
index 3729583..ba1f360 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
@@ -37,11 +37,11 @@
 
         /// <summary>
         /// Dimension. </summary>
-        public readonly string Dim;
+        public string Dim { get; private set; }
 
         /// <summary>
         /// Label. </summary>
-        public readonly string Label;
+        public string Label { get; private set; }
 
         /// <summary>
         /// Sole constructor. </summary>
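
Turning public readonly fields into auto-implemented properties with private setters keeps the members immutable from the caller's point of view while exposing them through property syntax, as .NET consumers expect. A hypothetical type showing the shape of the change:

    public class LabeledValueField
    {
        // Before: Java-style public readonly fields, e.g.
        //     public readonly string Dim;
        // After: get-only from the outside, settable only inside the class.
        public string Dim { get; private set; }
        public string Label { get; private set; }

        public LabeledValueField(string dim, string label)
        {
            Dim = dim;
            Label = label;
        }
    }

Note that this is a binary-level change: a field access and a property getter compile to different IL, so code built against the field form has to be recompiled, which is presumably why the cleanup is happening while the port's API is still settling.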

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
index 2b3c2b1..f895210 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
@@ -51,10 +51,10 @@ namespace Lucene.Net.Facet.SortedSet
         {
             /// <summary>
             /// Start of range, inclusive: </summary>
-            public readonly int Start;
+            public int Start { get; private set; }
             /// <summary>
             /// End of range, inclusive: </summary>
-            public readonly int End;
+            public int End { get; private set; }
 
             /// <summary>
             /// Start and end are inclusive. </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
index 6092f9a..f2e70a7 100644
--- a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
@@ -48,15 +48,15 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Dimension for this field. </summary>
-        public readonly string dim;
+        public string Dim { get; private set; }
 
         /// <summary>
         /// Facet path for this field. </summary>
-        public readonly string[] path;
+        public string[] Path { get; private set; }
 
         /// <summary>
         /// Associated value. </summary>
-        public readonly BytesRef assoc;
+        public BytesRef Assoc { get; private set; }
 
         /// <summary>
         /// Creates this from {@code dim} and {@code path} and an
@@ -70,18 +70,18 @@ namespace Lucene.Net.Facet.Taxonomy
             {
                 FacetField.VerifyLabel(label);
             }
-            this.dim = dim;
-            this.assoc = assoc;
+            this.Dim = dim;
+            this.Assoc = assoc;
             if (path.Length == 0)
             {
                 throw new System.ArgumentException("path must have at least one element");
             }
-            this.path = path;
+            this.Path = path;
         }
 
         public override string ToString()
         {
-            return "AssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " bytes=" + assoc + ")";
+            return "AssociationFacetField(dim=" + Dim + " path=" + Arrays.ToString(Path) + " bytes=" + Assoc + ")";
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index 70bbd26..62ee95f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -115,9 +115,9 @@ namespace Lucene.Net.Facet.Taxonomy
 
             public override void Get(int docID, IntsRef ordinals)
             {
-                ordinals.Ints = cachedOrds.ordinals;
-                ordinals.Offset = cachedOrds.offsets[docID];
-                ordinals.Length = cachedOrds.offsets[docID + 1] - ordinals.Offset;
+                ordinals.Ints = cachedOrds.Ordinals;
+                ordinals.Offset = cachedOrds.Offsets[docID];
+                ordinals.Length = cachedOrds.Offsets[docID + 1] - ordinals.Offset;
             }
         }
 
@@ -127,11 +127,11 @@ namespace Lucene.Net.Facet.Taxonomy
         {
             /// <summary>
             /// Index into <seealso cref="#ordinals"/> for each document. </summary>
-            public readonly int[] offsets;
+            public int[] Offsets { get; private set; }
 
             /// <summary>
             /// Holds ords for all docs. </summary>
-            public readonly int[] ordinals;
+            public int[] Ordinals { get; private set; }
 
             /// <summary>
             /// Creates a new <seealso cref="CachedOrds"/> from the <seealso cref="BinaryDocValues"/>.
@@ -139,7 +139,7 @@ namespace Lucene.Net.Facet.Taxonomy
             /// </summary>
             public CachedOrds(OrdinalsSegmentReader source, int maxDoc)
             {
-                offsets = new int[maxDoc + 1];
+                Offsets = new int[maxDoc + 1];
                 int[] ords = new int[maxDoc]; // let's assume one ordinal per-document as an initial size
 
                 // this aggregator is limited to Integer.MAX_VALUE total ordinals.
@@ -147,7 +147,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 IntsRef values = new IntsRef(32);
                 for (int docID = 0; docID < maxDoc; docID++)
                 {
-                    offsets[docID] = (int)totOrds;
+                    Offsets[docID] = (int)totOrds;
                     source.Get(docID, values);
                     long nextLength = totOrds + values.Length;
                     if (nextLength > ords.Length)
@@ -161,26 +161,26 @@ namespace Lucene.Net.Facet.Taxonomy
                     Array.Copy(values.Ints, 0, ords, (int)totOrds, values.Length);
                     totOrds = nextLength;
                 }
-                offsets[maxDoc] = (int)totOrds;
+                Offsets[maxDoc] = (int)totOrds;
 
                 // if ords array is bigger by more than 10% of what we really need, shrink it
                 if ((double)totOrds / ords.Length < 0.9)
                 {
-                    this.ordinals = new int[(int)totOrds];
-                    Array.Copy(ords, 0, this.ordinals, 0, (int)totOrds);
+                    this.Ordinals = new int[(int)totOrds];
+                    Array.Copy(ords, 0, this.Ordinals, 0, (int)totOrds);
                 }
                 else
                 {
-                    this.ordinals = ords;
+                    this.Ordinals = ords;
                 }
             }
 
             public long RamBytesUsed()
             {
-                long mem = RamUsageEstimator.ShallowSizeOf(this) + RamUsageEstimator.SizeOf(offsets);
-                if (offsets != ordinals)
+                long mem = RamUsageEstimator.ShallowSizeOf(this) + RamUsageEstimator.SizeOf(Offsets);
+                if (Offsets != Ordinals)
                 {
-                    mem += RamUsageEstimator.SizeOf(ordinals);
+                    mem += RamUsageEstimator.SizeOf(Ordinals);
                 }
                 return mem;
             }
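
The CachedOrds constructor above grows a working int[] while gathering per-document ordinals and then copies into a right-sized array if more than roughly 10% of the capacity would otherwise be wasted. A standalone sketch of that grow-then-shrink pattern (hypothetical helper, not the CachedOrds constructor itself):

    using System;
    using System.Collections.Generic;

    public static class CompactArrayBuilder
    {
        public static int[] Build(IEnumerable<int[]> perDocOrdinals)
        {
            int[] ords = new int[16];   // initial guess at capacity
            long total = 0;

            foreach (int[] values in perDocOrdinals)
            {
                long next = total + values.Length;
                if (next > ords.Length)
                {
                    // Grow geometrically so appends stay amortized O(1).
                    Array.Resize(ref ords, Math.Max(ords.Length * 2, (int)next));
                }
                Array.Copy(values, 0, ords, (int)total, values.Length);
                total = next;
            }

            // If the backing array is oversized by more than ~10%, shrink it.
            if ((double)total / ords.Length < 0.9)
            {
                int[] compact = new int[(int)total];
                Array.Copy(ords, compact, (int)total);
                return compact;
            }
            return ords;
        }
    }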

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index d00c50a..f168e87 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -40,17 +40,17 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <seealso cref="#subpath(int)"/>, therefore you should traverse the array up to
         /// <seealso cref="#length"/> for this path's components.
         /// </summary>
-        public readonly string[] components;
+        public string[] Components { get; private set; }
 
         /// <summary>
         /// The number of components of this <seealso cref="CategoryPath"/>. </summary>
-        public readonly int length;
+        public int Length { get; private set; }
 
         // Used by singleton EMPTY
         private CategoryPath()
         {
-            components = null;
-            length = 0;
+            Components = null;
+            Length = 0;
         }
 
         // Used by subpath
@@ -59,9 +59,9 @@ namespace Lucene.Net.Facet.Taxonomy
             // while the code which calls this method is safe, at some point a test
             // tripped on AIOOBE in toString, but we failed to reproduce. adding the
             // assert as a safety check.
-            Debug.Assert(prefixLen > 0 && prefixLen <= copyFrom.components.Length, "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.components.Length);
-            this.components = copyFrom.components;
-            length = prefixLen;
+            Debug.Assert(prefixLen > 0 && prefixLen <= copyFrom.Components.Length, "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length);
+            this.Components = copyFrom.Components;
+            Length = prefixLen;
         }
 
         /// <summary>
@@ -76,8 +76,8 @@ namespace Lucene.Net.Facet.Taxonomy
                     throw new System.ArgumentException("empty or null components not allowed: " + Arrays.ToString(components));
                 }
             }
-            this.components = components;
-            length = components.Length;
+            this.Components = components;
+            Length = components.Length;
         }
 
         /// <summary>
@@ -87,8 +87,8 @@ namespace Lucene.Net.Facet.Taxonomy
             string[] comps = pathString.Split(new[] { delimiter }, StringSplitOptions.RemoveEmptyEntries);
             if (comps.Length == 1 && comps[0].Length == 0)
             {
-                components = null;
-                length = 0;
+                Components = null;
+                Length = 0;
             }
             else
             {
@@ -99,8 +99,8 @@ namespace Lucene.Net.Facet.Taxonomy
                         throw new System.ArgumentException("empty or null components not allowed: " + Arrays.ToString(comps));
                     }
                 }
-                components = comps;
-                length = components.Length;
+                Components = comps;
+                Length = Components.Length;
             }
         }
 
@@ -111,17 +111,17 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         public virtual int FullPathLength()
         {
-            if (length == 0)
+            if (Length == 0)
             {
                 return 0;
             }
 
             int charsNeeded = 0;
-            for (int i = 0; i < length; i++)
+            for (int i = 0; i < Length; i++)
             {
-                charsNeeded += components[i].Length;
+                charsNeeded += Components[i].Length;
             }
-            charsNeeded += length - 1; // num delimiter chars
+            charsNeeded += Length - 1; // num delimiter chars
             return charsNeeded;
         }
 
@@ -131,10 +131,10 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         public virtual int CompareTo(CategoryPath other)
         {
-            int len = length < other.length ? length : other.length;
+            int len = Length < other.Length ? Length : other.Length;
             for (int i = 0, j = 0; i < len; i++, j++)
             {
-                int cmp = components[i].CompareTo(other.components[j]);
+                int cmp = Components[i].CompareTo(other.Components[j]);
                 if (cmp < 0) // this is 'before'
                 {
                     return -1;
@@ -146,7 +146,7 @@ namespace Lucene.Net.Facet.Taxonomy
             }
 
             // one is a prefix of the other
-            return length - other.length;
+            return Length - other.Length;
         }
 
         private void HasDelimiter(string offender, char delimiter)
@@ -179,25 +179,25 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         public virtual int CopyFullPath(char[] buf, int start, char delimiter)
         {
-            if (length == 0)
+            if (Length == 0)
             {
                 return 0;
             }
 
             int idx = start;
-            int upto = length - 1;
+            int upto = Length - 1;
             for (int i = 0; i < upto; i++)
             {
-                int len = components[i].Length;
-                components[i].CopyTo(0, buf, idx, len - 0);
+                int len = Components[i].Length;
+                Components[i].CopyTo(0, buf, idx, len - 0);
                 NoDelimiter(buf, idx, len, delimiter);
                 idx += len;
                 buf[idx++] = delimiter;
             }
-            components[upto].CopyTo(0, buf, idx, components[upto].Length - 0);
-            NoDelimiter(buf, idx, components[upto].Length, delimiter);
+            Components[upto].CopyTo(0, buf, idx, Components[upto].Length - 0);
+            NoDelimiter(buf, idx, Components[upto].Length, delimiter);
 
-            return idx + components[upto].Length - start;
+            return idx + Components[upto].Length - start;
         }
 
         public override bool Equals(object obj)
@@ -208,16 +208,16 @@ namespace Lucene.Net.Facet.Taxonomy
             }
 
             CategoryPath other = (CategoryPath)obj;
-            if (length != other.length)
+            if (Length != other.Length)
             {
                 return false; // not same length, cannot be equal
             }
 
             // CategoryPaths are more likely to differ at the last components, so start
             // from last-first
-            for (int i = length - 1; i >= 0; i--)
+            for (int i = Length - 1; i >= 0; i--)
             {
-                if (!components[i].Equals(other.components[i]))
+                if (!Components[i].Equals(other.Components[i]))
                 {
                     return false;
                 }
@@ -227,15 +227,15 @@ namespace Lucene.Net.Facet.Taxonomy
 
         public override int GetHashCode()
         {
-            if (length == 0)
+            if (Length == 0)
             {
                 return 0;
             }
 
-            int hash = length;
-            for (int i = 0; i < length; i++)
+            int hash = Length;
+            for (int i = 0; i < Length; i++)
             {
-                hash = hash * 31 + components[i].GetHashCode();
+                hash = hash * 31 + Components[i].GetHashCode();
             }
             return hash;
         }
@@ -244,15 +244,15 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Calculate a 64-bit hash function for this path. </summary>
         public virtual long LongHashCode()
         {
-            if (length == 0)
+            if (Length == 0)
             {
                 return 0;
             }
 
-            long hash = length;
-            for (int i = 0; i < length; i++)
+            long hash = Length;
+            for (int i = 0; i < Length; i++)
             {
-                hash = hash * 65599 + components[i].GetHashCode();
+                hash = hash * 65599 + Components[i].GetHashCode();
             }
             return hash;
         }
@@ -261,7 +261,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Returns a sub-path of this path up to {@code length} components. </summary>
         public virtual CategoryPath Subpath(int length)
         {
-            if (length >= this.length || length < 0)
+            if (length >= this.Length || length < 0)
             {
                 return this;
             }
@@ -291,19 +291,19 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         public virtual string ToString(char delimiter)
         {
-            if (length == 0)
+            if (Length == 0)
             {
                 return "";
             }
 
             StringBuilder sb = new StringBuilder();
-            for (int i = 0; i < length; i++)
+            for (int i = 0; i < Length; i++)
             {
-                if (components[i].IndexOf(delimiter) != -1)
+                if (Components[i].IndexOf(delimiter) != -1)
                 {
-                    HasDelimiter(components[i], delimiter);
+                    HasDelimiter(Components[i], delimiter);
                 }
-                sb.Append(components[i]).Append(delimiter);
+                sb.Append(Components[i]).Append(delimiter);
             }
             sb.Length = sb.Length - 1; // remove last delimiter
             return sb.ToString();
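
CategoryPath combines component hashes with the multiply-by-31 scheme for the 32-bit hash and multiply-by-65599 for the 64-bit variant, seeding both with the path length. The same combining logic, reduced to a standalone helper over a string array:

    public static class PathHashing
    {
        // 32-bit hash: seed with the length, then fold each component in with * 31.
        public static int Hash32(string[] components)
        {
            if (components == null || components.Length == 0) return 0;
            int hash = components.Length;
            foreach (string c in components)
                hash = hash * 31 + c.GetHashCode();
            return hash;
        }

        // 64-bit hash: same shape, larger multiplier to spread values over 64 bits.
        public static long Hash64(string[] components)
        {
            if (components == null || components.Length == 0) return 0;
            long hash = components.Length;
            foreach (string c in components)
                hash = hash * 65599 + c.GetHashCode();
            return hash;
        }
    }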

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index 94eb2d6..26dc493 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -50,11 +50,11 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <seealso cref="#subpath(int)"/>, therefore you should traverse the array up to
         /// <seealso cref="#length"/> for this path's components.
         /// </summary>
-        public readonly string[] Components;
+        public string[] Components { get; private set; }
 
         /// <summary>
         /// The number of components of this <seealso cref="FacetLabel"/>. </summary>
-        public readonly int Length;
+        public int Length { get; private set; }
 
         // Used by subpath
         private FacetLabel(FacetLabel copyFrom, int prefixLen)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
index f54b70b..e303394 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
@@ -58,13 +58,13 @@ namespace Lucene.Net.Facet.Taxonomy
         {
             foreach (FacetsCollector.MatchingDocs hits in matchingDocs)
             {
-                BinaryDocValues dv = hits.context.AtomicReader.GetBinaryDocValues(IndexFieldName);
+                BinaryDocValues dv = hits.Context.AtomicReader.GetBinaryDocValues(indexFieldName);
                 if (dv == null) // this reader does not have DocValues for the requested category list
                 {
                     continue;
                 }
 
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 int doc;
                 BytesRef bytesRef = new BytesRef();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index 8b47430..1b070a6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -58,8 +58,8 @@ namespace Lucene.Net.Facet.Taxonomy
 
         public override string ToString()
         {
-            return "FloatAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + 
-                " value=" + BytesRefToFloat(assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
+            return "FloatAssociationFacetField(dim=" + Dim + " path=" + Arrays.ToString(Path) + 
+                " value=" + BytesRefToFloat(Assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index e0b3688..f2e79a5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -46,15 +46,15 @@ namespace Lucene.Net.Facet.Taxonomy
         protected virtual void Rollup()
         {
             // Rollup any necessary dims:
-            foreach (KeyValuePair<string, FacetsConfig.DimConfig> ent in Config.DimConfigs)
+            foreach (KeyValuePair<string, FacetsConfig.DimConfig> ent in config.DimConfigs)
             {
                 string dim = ent.Key;
                 FacetsConfig.DimConfig ft = ent.Value;
                 if (ft.Hierarchical && ft.MultiValued == false)
                 {
-                    int dimRootOrd = TaxoReader.GetOrdinal(new FacetLabel(dim));
+                    int dimRootOrd = taxoReader.GetOrdinal(new FacetLabel(dim));
                     Debug.Assert(dimRootOrd > 0);
-                    values[dimRootOrd] += Rollup(Children[dimRootOrd]);
+                    values[dimRootOrd] += Rollup(children[dimRootOrd]);
                 }
             }
         }
@@ -64,10 +64,10 @@ namespace Lucene.Net.Facet.Taxonomy
             float sum = 0;
             while (ord != TaxonomyReader.INVALID_ORDINAL)
             {
-                float childValue = values[ord] + Rollup(Children[ord]);
+                float childValue = values[ord] + Rollup(children[ord]);
                 values[ord] = childValue;
                 sum += childValue;
-                ord = Siblings[ord];
+                ord = siblings[ord];
             }
             return sum;
         }
@@ -90,7 +90,7 @@ namespace Lucene.Net.Facet.Taxonomy
                     throw new System.ArgumentException("cannot return dimension-level value alone; use getTopChildren instead");
                 }
             }
-            int ord = TaxoReader.GetOrdinal(new FacetLabel(dim, path));
+            int ord = taxoReader.GetOrdinal(new FacetLabel(dim, path));
             if (ord < 0)
             {
                 return -1;
@@ -106,16 +106,16 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             FacetsConfig.DimConfig dimConfig = VerifyDim(dim);
             FacetLabel cp = new FacetLabel(dim, path);
-            int dimOrd = TaxoReader.GetOrdinal(cp);
+            int dimOrd = taxoReader.GetOrdinal(cp);
             if (dimOrd == -1)
             {
                 return null;
             }
 
-            TopOrdAndFloatQueue q = new TopOrdAndFloatQueue(Math.Min(TaxoReader.Size, topN));
+            TopOrdAndFloatQueue q = new TopOrdAndFloatQueue(Math.Min(taxoReader.Size, topN));
             float bottomValue = 0;
 
-            int ord = Children[dimOrd];
+            int ord = children[dimOrd];
             float sumValues = 0;
             int childCount = 0;
 
@@ -132,17 +132,17 @@ namespace Lucene.Net.Facet.Taxonomy
                         {
                             reuse = new TopOrdAndFloatQueue.OrdAndValue();
                         }
-                        reuse.ord = ord;
-                        reuse.value = values[ord];
+                        reuse.Ord = ord;
+                        reuse.Value = values[ord];
                         reuse = q.InsertWithOverflow(reuse);
                         if (q.Size() == topN)
                         {
-                            bottomValue = q.Top().value;
+                            bottomValue = q.Top().Value;
                         }
                     }
                 }
 
-                ord = Siblings[ord];
+                ord = siblings[ord];
             }
 
             if (sumValues == 0)
@@ -171,8 +171,8 @@ namespace Lucene.Net.Facet.Taxonomy
             for (int i = labelValues.Length - 1; i >= 0; i--)
             {
                 TopOrdAndFloatQueue.OrdAndValue ordAndValue = q.Pop();
-                FacetLabel child = TaxoReader.GetPath(ordAndValue.ord);
-                labelValues[i] = new LabelAndValue(child.Components[cp.Length], ordAndValue.value);
+                FacetLabel child = taxoReader.GetPath(ordAndValue.Ord);
+                labelValues[i] = new LabelAndValue(child.Components[cp.Length], ordAndValue.Value);
             }
 
             return new FacetResult(dim, path, sumValues, labelValues, childCount);
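
GetTopChildren above walks a dimension's children through the siblings array and keeps the best topN entries in a bounded priority queue, tracking the current bottom value so that most candidates are rejected without touching the queue. A simplified standalone sketch of bounded top-N selection (a SortedSet stands in for the Lucene.NET TopOrdAndFloatQueue; the names are hypothetical):

    using System.Collections.Generic;
    using System.Linq;

    public static class TopNSketch
    {
        // Returns the topN (ordinal, value) pairs with the largest values.
        public static List<(int ord, float value)> Select(
            IEnumerable<(int ord, float value)> candidates, int topN)
        {
            var result = new List<(int ord, float value)>();
            if (topN <= 0) return result;

            // Min-ordered set: its Min element is the current "bottom" of the running top-N.
            var queue = new SortedSet<(float value, int ord)>();
            float bottomValue = float.NegativeInfinity;

            foreach (var (ord, value) in candidates)
            {
                if (queue.Count < topN)
                {
                    queue.Add((value, ord));
                    bottomValue = queue.Min.value;
                }
                else if (value > bottomValue)
                {
                    // Displace the current smallest entry, then recompute the bottom.
                    queue.Remove(queue.Min);
                    queue.Add((value, ord));
                    bottomValue = queue.Min.value;
                }
                // Candidates at or below the bottom are rejected without touching the queue.
            }

            result.AddRange(queue.OrderByDescending(e => e.value).Select(e => (e.ord, e.value)));
            return result;
        }
    }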

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
index 1c854fd..e7b56d7 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
@@ -67,7 +67,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         public override string ToString()
         {
-            return "IntAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + BytesRefToInt(assoc) + ")";
+            return "IntAssociationFacetField(dim=" + Dim + " path=" + Arrays.ToString(Path) + " value=" + BytesRefToInt(Assoc) + ")";
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index 11f76f0..c9a789b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -46,18 +46,18 @@ namespace Lucene.Net.Facet.Taxonomy
         protected virtual void Rollup()
         {
             // Rollup any necessary dims:
-            foreach (KeyValuePair<string, FacetsConfig.DimConfig> ent in Config.DimConfigs)
+            foreach (KeyValuePair<string, FacetsConfig.DimConfig> ent in config.DimConfigs)
             {
                 string dim = ent.Key;
                 FacetsConfig.DimConfig ft = ent.Value;
                 if (ft.Hierarchical && ft.MultiValued == false)
                 {
-                    int dimRootOrd = TaxoReader.GetOrdinal(new FacetLabel(dim));
+                    int dimRootOrd = taxoReader.GetOrdinal(new FacetLabel(dim));
                     // It can be -1 if this field was declared in the
                     // config but never indexed:
                     if (dimRootOrd > 0)
                     {
-                        values[dimRootOrd] += Rollup(Children[dimRootOrd]);
+                        values[dimRootOrd] += Rollup(children[dimRootOrd]);
                     }
                 }
             }
@@ -68,10 +68,10 @@ namespace Lucene.Net.Facet.Taxonomy
             int sum = 0;
             while (ord != TaxonomyReader.INVALID_ORDINAL)
             {
-                int childValue = values[ord] + Rollup(Children[ord]);
+                int childValue = values[ord] + Rollup(children[ord]);
                 values[ord] = childValue;
                 sum += childValue;
-                ord = Siblings[ord];
+                ord = siblings[ord];
             }
             return sum;
         }
@@ -94,7 +94,7 @@ namespace Lucene.Net.Facet.Taxonomy
                     throw new System.ArgumentException("cannot return dimension-level value alone; use getTopChildren instead");
                 }
             }
-            int ord = TaxoReader.GetOrdinal(new FacetLabel(dim, path));
+            int ord = taxoReader.GetOrdinal(new FacetLabel(dim, path));
             if (ord < 0)
             {
                 return -1;
@@ -110,17 +110,17 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             var dimConfig = VerifyDim(dim);
             FacetLabel cp = new FacetLabel(dim, path);
-            int dimOrd = TaxoReader.GetOrdinal(cp);
+            int dimOrd = taxoReader.GetOrdinal(cp);
             if (dimOrd == -1)
             {
                 return null;
             }
 
-            TopOrdAndIntQueue q = new TopOrdAndIntQueue(Math.Min(TaxoReader.Size, topN));
+            TopOrdAndIntQueue q = new TopOrdAndIntQueue(Math.Min(taxoReader.Size, topN));
 
             int bottomValue = 0;
 
-            int ord = Children[dimOrd];
+            int ord = children[dimOrd];
             int totValue = 0;
             int childCount = 0;
 
@@ -147,7 +147,7 @@ namespace Lucene.Net.Facet.Taxonomy
                     }
                 }
 
-                ord = Siblings[ord];
+                ord = siblings[ord];
             }
 
             if (totValue == 0)
@@ -176,7 +176,7 @@ namespace Lucene.Net.Facet.Taxonomy
             for (int i = labelValues.Length - 1; i >= 0; i--)
             {
                 TopOrdAndIntQueue.OrdAndValue ordAndValue = q.Pop();
-                FacetLabel child = TaxoReader.GetPath(ordAndValue.Ord);
+                FacetLabel child = taxoReader.GetPath(ordAndValue.Ord);
                 labelValues[i] = new LabelAndValue(child.Components[cp.Length], ordAndValue.Value);
             }
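
Rollup above aggregates a hierarchical dimension by recursing down the children array and across the siblings array, writing each node's rolled-up total back into its own slot and returning the subtree sum to the parent. A standalone sketch of the same traversal over a small hand-built children/siblings encoding (the sample layout is hypothetical, and the -1 sentinel mirrors the invalid-ordinal convention only by assumption):

    using System;

    public static class RollupSketch
    {
        private const int INVALID_ORDINAL = -1;

        // children[ord] = first child of ord, or INVALID_ORDINAL; siblings[ord] = next sibling, or INVALID_ORDINAL.
        public static int Rollup(int ord, int[] children, int[] siblings, int[] values)
        {
            int sum = 0;
            while (ord != INVALID_ORDINAL)
            {
                int childValue = values[ord] + Rollup(children[ord], children, siblings, values);
                values[ord] = childValue;   // store the rolled-up value at the node itself
                sum += childValue;
                ord = siblings[ord];
            }
            return sum;
        }

        public static void Main()
        {
            // ord 0 = root, 1 = "a", 2 = "a/x", 3 = "a/y", 4 = "b" (hypothetical layout)
            int[] children = { 1, 2, INVALID_ORDINAL, INVALID_ORDINAL, INVALID_ORDINAL };
            int[] siblings = { INVALID_ORDINAL, 4, 3, INVALID_ORDINAL, INVALID_ORDINAL };
            int[] values   = { 0, 1, 2, 3, 4 };

            Rollup(children[0], children, siblings, values);
            Console.WriteLine(string.Join(", ", values));   // values[1] now includes its children: 1 + 2 + 3 = 6
        }
    }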
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index 5d51036..1f7883f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -95,20 +95,20 @@ namespace Lucene.Net.Facet.Taxonomy
                 if (cache.TryGetValue(key, out cdo))
                 {
                     // Item already exists, update our last access time
-                    cdo.Timestamp = GetTimestamp();
+                    cdo.timestamp = GetTimestamp();
                 }
                 else
                 {
                     cache[key] = new CacheDataObject
                     {
-                        Value = value,
-                        Timestamp = GetTimestamp()
+                        value = value,
+                        timestamp = GetTimestamp()
                     };
                     // We have added a new item, so we may need to remove the eldest
                     if (cache.Count > MaxSize)
                     {
                         // Remove the eldest item (lowest timestamp) from the cache
-                        cache.Remove(cache.OrderBy(x => x.Value.Timestamp).First().Key);
+                        cache.Remove(cache.OrderBy(x => x.Value.timestamp).First().Key);
                     }
                 }
             }
@@ -123,9 +123,9 @@ namespace Lucene.Net.Facet.Taxonomy
                 if (cache.TryGetValue(key, out cdo))
                 {
                     // Write our last access time
-                    cdo.Timestamp = GetTimestamp();
+                    cdo.timestamp = GetTimestamp();
 
-                    return cdo.Value;
+                    return cdo.value;
                 }
             }
             return null;
@@ -139,8 +139,8 @@ namespace Lucene.Net.Facet.Taxonomy
                 if (cache.TryGetValue(key, out cdo))
                 {
                     // Write our last access time
-                    cdo.Timestamp = GetTimestamp();
-                    value = cdo.Value;
+                    cdo.timestamp = GetTimestamp();
+                    value = cdo.value;
 
                     return true;
                 }
@@ -184,12 +184,12 @@ namespace Lucene.Net.Facet.Taxonomy
         private class CacheDataObject
         {
             // Ticks representing the last access time
-            public long Timestamp;
-            public TValue Value;
+            public long timestamp;
+            public TValue value;
 
             public override string ToString()
             {
-                return "Last Access: " + Timestamp.ToString() + " - " + Value.ToString();
+                return "Last Access: " + timestamp.ToString() + " - " + value.ToString();
             }
         }
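
The LRUHashMap changes above are renames inside the cache entry class; the surrounding logic stamps every entry with a logical timestamp on each access and, once the cache exceeds MaxSize, evicts the entry with the lowest timestamp via a LINQ scan. A minimal self-contained sketch of that timestamp-based LRU idea (simplified locking and hypothetical names, not the Lucene.NET class itself):

    using System.Collections.Generic;
    using System.Linq;

    public class TimestampLruCache<TKey, TValue>
    {
        private class Entry { public TValue Value; public long Timestamp; }

        private readonly Dictionary<TKey, Entry> cache = new Dictionary<TKey, Entry>();
        private readonly object syncLock = new object();
        private long clock;                 // logical clock, incremented on every access
        public int MaxSize { get; set; }

        public TimestampLruCache(int maxSize) { MaxSize = maxSize; }

        public void Put(TKey key, TValue value)
        {
            lock (syncLock)
            {
                Entry e;
                if (cache.TryGetValue(key, out e))
                {
                    e.Value = value;
                    e.Timestamp = ++clock;      // refresh recency on update
                    return;
                }
                cache[key] = new Entry { Value = value, Timestamp = ++clock };
                if (cache.Count > MaxSize)
                {
                    // Evict the least recently used entry (lowest timestamp).
                    // O(n) scan, acceptable for the small caches this pattern targets.
                    TKey eldest = cache.OrderBy(kv => kv.Value.Timestamp).First().Key;
                    cache.Remove(eldest);
                }
            }
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            lock (syncLock)
            {
                Entry e;
                if (cache.TryGetValue(key, out e))
                {
                    e.Timestamp = ++clock;      // reading also counts as use
                    value = e.Value;
                    return true;
                }
            }
            value = default(TValue);
            return false;
        }
    }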
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index 6686188..2c8edb3 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -44,24 +44,24 @@ namespace Lucene.Net.Facet.Taxonomy
     {
         /// <summary>
         /// Holds a matched pair of <seealso cref="IndexSearcher"/> and
-        ///  <seealso cref="TaxonomyReader"/> 
+        ///  <seealso cref="Taxonomy.TaxonomyReader"/> 
         /// </summary>
         public class SearcherAndTaxonomy
         {
             /// <summary>
             /// Point-in-time <seealso cref="IndexSearcher"/>. </summary>
-            public readonly IndexSearcher searcher;
+            public IndexSearcher Searcher { get; private set; }
 
             /// <summary>
             /// Matching point-in-time <seealso cref="DirectoryTaxonomyReader"/>. </summary>
-            public readonly DirectoryTaxonomyReader taxonomyReader;
+            public DirectoryTaxonomyReader TaxonomyReader { get; private set; }
 
             /// <summary>
             /// Create a SearcherAndTaxonomy </summary>
             public SearcherAndTaxonomy(IndexSearcher searcher, DirectoryTaxonomyReader taxonomyReader)
             {
-                this.searcher = searcher;
-                this.taxonomyReader = taxonomyReader;
+                this.Searcher = searcher;
+                this.TaxonomyReader = taxonomyReader;
             }
         }
 
@@ -113,7 +113,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         protected override void DecRef(SearcherAndTaxonomy @ref)
         {
-            @ref.searcher.IndexReader.DecRef();
+            @ref.Searcher.IndexReader.DecRef();
 
             // This decRef can fail, and then in theory we should
             // tryIncRef the searcher to put back the ref count
@@ -122,20 +122,20 @@ namespace Lucene.Net.Facet.Taxonomy
             // during close, in which case 2) very likely the
             // searcher was also just closed by the above decRef and
             // a tryIncRef would fail:
-            @ref.taxonomyReader.DecRef();
+            @ref.TaxonomyReader.DecRef();
         }
 
         protected override bool TryIncRef(SearcherAndTaxonomy @ref)
         {
-            if (@ref.searcher.IndexReader.TryIncRef())
+            if (@ref.Searcher.IndexReader.TryIncRef())
             {
-                if (@ref.taxonomyReader.TryIncRef())
+                if (@ref.TaxonomyReader.TryIncRef())
                 {
                     return true;
                 }
                 else
                 {
-                    @ref.searcher.IndexReader.DecRef();
+                    @ref.Searcher.IndexReader.DecRef();
                 }
             }
             return false;
@@ -146,7 +146,7 @@ namespace Lucene.Net.Facet.Taxonomy
             // Must re-open searcher first, otherwise we may get a
             // new reader that references ords not yet known to the
             // taxonomy reader:
-            IndexReader r = @ref.searcher.IndexReader;
+            IndexReader r = @ref.Searcher.IndexReader;
             IndexReader newReader = DirectoryReader.OpenIfChanged((DirectoryReader)r);
             if (newReader == null)
             {
@@ -154,11 +154,11 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             else
             {
-                var tr = TaxonomyReader.OpenIfChanged(@ref.taxonomyReader);
+                var tr = TaxonomyReader.OpenIfChanged(@ref.TaxonomyReader);
                 if (tr == null)
                 {
-                    @ref.taxonomyReader.IncRef();
-                    tr = @ref.taxonomyReader;
+                    @ref.TaxonomyReader.IncRef();
+                    tr = @ref.TaxonomyReader;
                 }
                 else if (taxoWriter != null && taxoWriter.TaxonomyEpoch != taxoEpoch)
                 {
@@ -172,7 +172,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         protected override int GetRefCount(SearcherAndTaxonomy reference)
         {
-            return reference.searcher.IndexReader.RefCount;
+            return reference.Searcher.IndexReader.RefCount;
         }
     }
 }
\ No newline at end of file
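
DecRef and TryIncRef above pair the searcher's IndexReader reference count with the taxonomy reader's, backing out the first if the second cannot be acquired. The underlying reference-counting discipline looks roughly like the following standalone sketch (hypothetical class, not the Lucene.NET readers):

    using System;
    using System.Threading;

    public class RefCounted : IDisposable
    {
        private int refCount = 1;   // the creator holds the initial reference

        public int RefCount { get { return Volatile.Read(ref refCount); } }

        public void IncRef()
        {
            if (Interlocked.Increment(ref refCount) <= 1)
                throw new InvalidOperationException("IncRef called on a released object");
        }

        // Try to take a reference, failing cleanly if the object is already released.
        public bool TryIncRef()
        {
            while (true)
            {
                int current = Volatile.Read(ref refCount);
                if (current <= 0) return false;                   // already released
                if (Interlocked.CompareExchange(ref refCount, current + 1, current) == current)
                    return true;                                   // won the race
            }
        }

        public void DecRef()
        {
            int after = Interlocked.Decrement(ref refCount);
            if (after == 0) Dispose();                             // last reference releases the resources
            else if (after < 0) throw new InvalidOperationException("too many DecRef calls");
        }

        public virtual void Dispose() { /* release underlying resources here */ }
    }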

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
index af008c7..059822c 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
@@ -53,8 +53,8 @@ namespace Lucene.Net.Facet.Taxonomy
             IntsRef scratch = new IntsRef();
             foreach (FacetsCollector.MatchingDocs hits in matchingDocs)
             {
-                OrdinalsReader.OrdinalsSegmentReader ords = ordinalsReader.GetReader(hits.context);
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                OrdinalsReader.OrdinalsSegmentReader ords = ordinalsReader.GetReader(hits.Context);
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 int doc;
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
index 821c942..b943004 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
@@ -58,13 +58,13 @@ namespace Lucene.Net.Facet.Taxonomy
             //System.out.println("count matchingDocs=" + matchingDocs + " facetsField=" + facetsFieldName);
             foreach (FacetsCollector.MatchingDocs hits in matchingDocs)
             {
-                BinaryDocValues dv = hits.context.AtomicReader.GetBinaryDocValues(IndexFieldName);
+                BinaryDocValues dv = hits.Context.AtomicReader.GetBinaryDocValues(indexFieldName);
                 if (dv == null) // this reader does not have DocValues for the requested category list
                 {
                     continue;
                 }
 
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 int doc;
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
index 8d835fb..d053dfe 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
@@ -57,13 +57,13 @@ namespace Lucene.Net.Facet.Taxonomy
             //System.out.println("count matchingDocs=" + matchingDocs + " facetsField=" + facetsFieldName);
             foreach (FacetsCollector.MatchingDocs hits in matchingDocs)
             {
-                BinaryDocValues dv = hits.context.AtomicReader.GetBinaryDocValues(IndexFieldName);
+                BinaryDocValues dv = hits.Context.AtomicReader.GetBinaryDocValues(indexFieldName);
                 if (dv == null) // this reader does not have DocValues for the requested category list
                 {
                     continue;
                 }
 
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 int doc;
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index 4981f33..2ae0f49 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -129,13 +129,13 @@ namespace Lucene.Net.Facet.Taxonomy
             IntsRef scratch = new IntsRef();
             foreach (MatchingDocs hits in matchingDocs)
             {
-                OrdinalsReader.OrdinalsSegmentReader ords = ordinalsReader.GetReader(hits.context);
+                OrdinalsReader.OrdinalsSegmentReader ords = ordinalsReader.GetReader(hits.Context);
 
                 int scoresIdx = 0;
-                float[] scores = hits.scores;
+                float[] scores = hits.Scores;
 
-                FunctionValues functionValues = valueSource.GetValues(context, hits.context);
-                DocIdSetIterator docs = hits.bits.GetIterator();
+                FunctionValues functionValues = valueSource.GetValues(context, hits.Context);
+                DocIdSetIterator docs = hits.Bits.GetIterator();
 
                 int doc;
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index 03baa55..c0ae758 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -54,39 +54,39 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Index field name provided to the constructor. </summary>
-        protected readonly string IndexFieldName;
+        protected readonly string indexFieldName;
 
         /// <summary>
         /// {@code TaxonomyReader} provided to the constructor. </summary>
-        protected readonly TaxonomyReader TaxoReader;
+        protected readonly TaxonomyReader taxoReader;
 
         /// <summary>
         /// {@code FacetsConfig} provided to the constructor. </summary>
-        protected readonly FacetsConfig Config;
+        protected readonly FacetsConfig config;
 
         /// <summary>
         /// Maps parent ordinal to its child, or -1 if the parent
         ///  is childless. 
         /// </summary>
-        protected readonly int[] Children;
+        protected readonly int[] children;
 
         /// <summary>
         /// Maps an ordinal to its sibling, or -1 if there is no
         ///  sibling. 
         /// </summary>
-        protected readonly int[] Siblings;
+        protected readonly int[] siblings;
 
         /// <summary>
         /// Sole constructor. 
         /// </summary>
         protected internal TaxonomyFacets(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config)
         {
-            this.IndexFieldName = indexFieldName;
-            this.TaxoReader = taxoReader;
-            this.Config = config;
+            this.indexFieldName = indexFieldName;
+            this.taxoReader = taxoReader;
+            this.config = config;
             ParallelTaxonomyArrays pta = taxoReader.ParallelTaxonomyArrays;
-            Children = pta.Children();
-            Siblings = pta.Siblings();
+            children = pta.Children();
+            siblings = pta.Siblings();
         }
 
         /// <summary>
@@ -96,23 +96,23 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         protected internal virtual DimConfig VerifyDim(string dim)
         {
-            DimConfig dimConfig = Config.GetDimConfig(dim);
-            if (!dimConfig.IndexFieldName.Equals(IndexFieldName))
+            DimConfig dimConfig = config.GetDimConfig(dim);
+            if (!dimConfig.IndexFieldName.Equals(indexFieldName))
             {
-                throw new System.ArgumentException("dimension \"" + dim + "\" was not indexed into field \"" + IndexFieldName);
+                throw new System.ArgumentException("dimension \"" + dim + "\" was not indexed into field \"" + indexFieldName);
             }
             return dimConfig;
         }
 
         public override IList<FacetResult> GetAllDims(int topN)
         {
-            int ord = Children[TaxonomyReader.ROOT_ORDINAL];
+            int ord = children[TaxonomyReader.ROOT_ORDINAL];
             IList<FacetResult> results = new List<FacetResult>();
             while (ord != TaxonomyReader.INVALID_ORDINAL)
             {
-                string dim = TaxoReader.GetPath(ord).Components[0];
-                DimConfig dimConfig = Config.GetDimConfig(dim);
-                if (dimConfig.IndexFieldName.Equals(IndexFieldName))
+                string dim = taxoReader.GetPath(ord).Components[0];
+                DimConfig dimConfig = config.GetDimConfig(dim);
+                if (dimConfig.IndexFieldName.Equals(indexFieldName))
                 {
                     FacetResult result = GetTopChildren(topN, dim);
                     if (result != null)
@@ -120,7 +120,7 @@ namespace Lucene.Net.Facet.Taxonomy
                         results.Add(result);
                     }
                 }
-                ord = Siblings[ord];
+                ord = siblings[ord];
             }
 
             // Sort by highest value, tie break by dim:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index f303abb..f94ebe4 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -133,7 +133,7 @@ namespace Lucene.Net.Facet.Taxonomy
             return newTaxoReader;
         }
 
-        private volatile bool Closed = false;
+        private volatile bool closed = false;
 
         // set refCount to 1 at start
         private readonly AtomicInteger refCount = new AtomicInteger(1);
@@ -168,10 +168,10 @@ namespace Lucene.Net.Facet.Taxonomy
             {
                 lock (this)
                 {
-                    if (!Closed)
+                    if (!closed)
                     {
                         DecRef();
-                        Closed = true;
+                        closed = true;
                     }
                 }
             }
@@ -192,7 +192,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 try
                 {
                     DoClose();
-                    Closed = true;
+                    closed = true;
                     success = true;
                 }
                 finally
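
The closed flag is volatile and the dispose path re-checks it under a lock, so the reference count is released exactly once even when Dispose races with another caller. A compact sketch of that dispose-once pattern (hypothetical class):

    using System;

    public class DisposeOnce : IDisposable
    {
        private volatile bool closed = false;
        private readonly object syncLock = new object();

        public void Dispose()
        {
            if (closed) return;            // fast path: already closed, no lock needed
            lock (syncLock)
            {
                if (!closed)
                {
                    ReleaseResources();    // runs at most once
                    closed = true;
                }
            }
        }

        protected virtual void ReleaseResources() { }
    }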


[36/46] lucenenet git commit: Renamed Facet.Taxonomy.ITaxonomyWriter.Size to Count (.NETified)

Posted by sy...@apache.org.
Renamed Facet.Taxonomy.ITaxonomyWriter.Size to Count (.NETified)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/64750067
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/64750067
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/64750067

Branch: refs/heads/master
Commit: 64750067f5b462403d2069e8e7edd1ddcf5df992
Parents: bcb5d13
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 17:27:09 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:32:01 2016 +0700

----------------------------------------------------------------------
 .../Directory/DirectoryTaxonomyWriter.cs        |  2 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs |  2 +-
 .../Directory/TestDirectoryTaxonomyWriter.cs    |  2 +-
 .../Taxonomy/TestSearcherTaxonomyManager.cs     |  4 +--
 .../Taxonomy/TestTaxonomyCombined.cs            | 30 ++++++++++----------
 5 files changed, 20 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/64750067/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index 9a7e4f9..dd497fd 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -723,7 +723,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
         }
 
-        public virtual int Size
+        public virtual int Count
         {
             get
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/64750067/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index 6358dc1..fdec22f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -102,7 +102,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// are also added automatically (including the root, which always get
         /// ordinal 0).
         /// </summary>
-        int Size { get; }
+        int Count { get; }
 
         /// <summary>
         /// Sets the commit user data map. That method is considered a transaction and
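
Renaming Size to Count lines the interface up with the BCL convention, where collection-like types expose Count (as on ICollection) and Length is reserved for arrays and strings. A small hypothetical interface and implementation showing the shape after the rename; the root-gets-ordinal-0 detail mirrors the doc comment above:

    using System.Collections.Generic;

    public interface ICategoryRegistry
    {
        // Number of categories registered so far, including the implicit root.
        int Count { get; }

        int AddCategory(string path);
    }

    public class InMemoryCategoryRegistry : ICategoryRegistry
    {
        private readonly Dictionary<string, int> ordinals =
            new Dictionary<string, int> { { "", 0 } };   // root gets ordinal 0

        public int Count { get { return ordinals.Count; } }

        public int AddCategory(string path)
        {
            int ord;
            if (!ordinals.TryGetValue(path, out ord))
            {
                ord = ordinals.Count;
                ordinals[path] = ord;
            }
            return ord;
        }
    }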

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/64750067/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index bd32d53..4e6dd85 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -419,7 +419,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             // LUCENE-4633: make sure that category "a" is not added again in any case
             taxoWriter.AddTaxonomy(input, new MemoryOrdinalMap());
-            Assert.AreEqual(2, taxoWriter.Size, "no categories should have been added"); // root + 'a'
+            Assert.AreEqual(2, taxoWriter.Count, "no categories should have been added"); // root + 'a'
             Assert.AreEqual(ordA, taxoWriter.AddCategory(new FacetLabel("a")), "category 'a' received new ordinal?");
 
             // add the same category again -- it should not receive the same ordinal !

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/64750067/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
index 257ec1b..9e5e7b6 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
@@ -114,10 +114,10 @@ namespace Lucene.Net.Facet.Taxonomy
 
                         if (VERBOSE)
                         {
-                            Console.WriteLine("TW size=" + tw.Size + " vs " + ordLimit);
+                            Console.WriteLine("TW size=" + tw.Count + " vs " + ordLimit);
                         }
 
-                        if (tw.Size >= ordLimit)
+                        if (tw.Count >= ordLimit)
                         {
                             break;
                         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/64750067/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index 45eeb3b..e205877 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -163,7 +163,7 @@ namespace Lucene.Net.Facet.Taxonomy
             FillTaxonomy(tw);
             // Also check ITaxonomyWriter.getSize() - see that the taxonomy's size
             // is what we expect it to be.
-            Assert.AreEqual(ExpectedCategories.Length, tw.Size);
+            Assert.AreEqual(ExpectedCategories.Length, tw.Count);
             tw.Dispose();
             indexDir.Dispose();
         }
@@ -185,7 +185,7 @@ namespace Lucene.Net.Facet.Taxonomy
             FillTaxonomy(tw);
             // Let's check the number of categories again, to see that no
             // extraneous categories were created:
-            Assert.AreEqual(ExpectedCategories.Length, tw.Size);
+            Assert.AreEqual(ExpectedCategories.Length, tw.Count);
             tw.Dispose();
             indexDir.Dispose();
         }
@@ -210,7 +210,7 @@ namespace Lucene.Net.Facet.Taxonomy
             // ones, and that the number of categories hasn't grown by the new
             // additions
             FillTaxonomy(tw);
-            Assert.AreEqual(ExpectedCategories.Length, tw.Size);
+            Assert.AreEqual(ExpectedCategories.Length, tw.Count);
             tw.Dispose();
             indexDir.Dispose();
         }
@@ -243,7 +243,7 @@ namespace Lucene.Net.Facet.Taxonomy
             // not be open (as explained above) but because it was not set to null,
             // we forgot that, tried to reopen it, and got an AlreadyClosedException.
             tw.Commit();
-            Assert.AreEqual(ExpectedCategories.Length + 1, tw.Size);
+            Assert.AreEqual(ExpectedCategories.Length + 1, tw.Count);
             tw.Dispose();
             indexDir.Dispose();
         }
@@ -259,37 +259,37 @@ namespace Lucene.Net.Facet.Taxonomy
         {
             var indexDir = NewDirectory();
             var tw = new DirectoryTaxonomyWriter(indexDir);
-            Assert.AreEqual(1, tw.Size); // the root only
+            Assert.AreEqual(1, tw.Count); // the root only
             // Test that adding a new top-level category works
             Assert.AreEqual(1, tw.AddCategory(new FacetLabel("a")));
-            Assert.AreEqual(2, tw.Size);
+            Assert.AreEqual(2, tw.Count);
             // Test that adding the same category again is noticed, and the
             // same ordinal (and not a new one) is returned.
             Assert.AreEqual(1, tw.AddCategory(new FacetLabel("a")));
-            Assert.AreEqual(2, tw.Size);
+            Assert.AreEqual(2, tw.Count);
             // Test that adding another top-level category returns a new ordinal,
             // not the same one
             Assert.AreEqual(2, tw.AddCategory(new FacetLabel("b")));
-            Assert.AreEqual(3, tw.Size);
+            Assert.AreEqual(3, tw.Count);
             // Test that adding a category inside one of the above adds just one
             // new ordinal:
             Assert.AreEqual(3, tw.AddCategory(new FacetLabel("a", "c")));
-            Assert.AreEqual(4, tw.Size);
+            Assert.AreEqual(4, tw.Count);
             // Test that adding the same second-level category doesn't do anything:
             Assert.AreEqual(3, tw.AddCategory(new FacetLabel("a", "c")));
-            Assert.AreEqual(4, tw.Size);
+            Assert.AreEqual(4, tw.Count);
             // Test that adding a second-level category with two new components
             // indeed adds two categories
             Assert.AreEqual(5, tw.AddCategory(new FacetLabel("d", "e")));
-            Assert.AreEqual(6, tw.Size);
+            Assert.AreEqual(6, tw.Count);
             // Verify that the parents were added above in the order we expected
             Assert.AreEqual(4, tw.AddCategory(new FacetLabel("d")));
             // Similar, but inside a category that already exists:
             Assert.AreEqual(7, tw.AddCategory(new FacetLabel("b", "d", "e")));
-            Assert.AreEqual(8, tw.Size);
+            Assert.AreEqual(8, tw.Count);
             // And now inside two levels of categories that already exist:
             Assert.AreEqual(8, tw.AddCategory(new FacetLabel("b", "d", "f")));
-            Assert.AreEqual(9, tw.Size);
+            Assert.AreEqual(9, tw.Count);
 
             tw.Dispose();
             indexDir.Dispose();
@@ -307,7 +307,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var tw = new DirectoryTaxonomyWriter(indexDir);
             // right after opening the index, it should already contain the
             // root, so have size 1:
-            Assert.AreEqual(1, tw.Size);
+            Assert.AreEqual(1, tw.Count);
             tw.Dispose();
             var tr = new DirectoryTaxonomyReader(indexDir);
             Assert.AreEqual(1, tr.Count);
@@ -1124,7 +1124,7 @@ namespace Lucene.Net.Facet.Taxonomy
             FillTaxonomyCheckPaths(tw);
             // Also check ITaxonomyWriter.getSize() - see that the taxonomy's size
             // is what we expect it to be.
-            Assert.AreEqual(ExpectedCategories.Length, tw.Size);
+            Assert.AreEqual(ExpectedCategories.Length, tw.Count);
             tw.Dispose();
             indexDir.Dispose();
         }


[04/46] lucenenet git commit: Finished port of Facet.Taxonomy.WriterCache.TestCompactLabelToOrdinal and fixed bugs with FacetLabel hash code, CategoryPathUtils.EqualsToSerialized(), CharBlockArray.SubSequence() and CompactLabelToOrdinal.Open() to make the tests pass.

Posted by sy...@apache.org.
Finished port of Facet.Taxonomy.WriterCache.TestCompactLabelToOrdinal and fixed bugs with FacetLabel hash code, CategoryPathUtils.EqualsToSerialized(), CharBlockArray.SubSequence() and CompactLabelToOrdinal.Open() to make the tests pass. Added an extra non-random test to make debugging easier.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/08dfc1bd
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/08dfc1bd
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/08dfc1bd

Branch: refs/heads/master
Commit: 08dfc1bd0e84712eed5d4d34dd629408998cd196
Parents: 861aa73
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 16:32:20 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:38 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Support/StreamUtils.cs      |  21 +--
 src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs     |   7 +-
 .../Taxonomy/WriterCache/CategoryPathUtils.cs   |   9 +-
 .../Taxonomy/WriterCache/CharBlockArray.cs      |  12 +-
 .../WriterCache/CompactLabelToOrdinal.cs        |  16 +-
 .../Taxonomy/WriterCache/TestCharBlockArray.cs  |   4 +-
 .../WriterCache/TestCompactLabelToOrdinal.cs    | 183 ++++++++++++++++---
 7 files changed, 189 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Core/Support/StreamUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/StreamUtils.cs b/src/Lucene.Net.Core/Support/StreamUtils.cs
index 7e05717..e6cd549 100644
--- a/src/Lucene.Net.Core/Support/StreamUtils.cs
+++ b/src/Lucene.Net.Core/Support/StreamUtils.cs
@@ -1,11 +1,5 @@
-\ufeffusing System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Runtime.Serialization;
+\ufeffusing System.IO;
 using System.Runtime.Serialization.Formatters.Binary;
-using System.Text;
-using System.Threading.Tasks;
 
 namespace Lucene.Net.Support
 {
@@ -15,24 +9,23 @@ namespace Lucene.Net.Support
 
         public static void SerializeToStream(object o, Stream outputStream)
         {
-            // LUCENENET TODO: It would probably be better to serialize to
-            // XML so this works across .NET framework versions or alternatively
-            // find/create an alternative binary formatter implementation that works that way.
             Formatter.Serialize(outputStream, o);
         }
 
+        public static void SerializeToStream(object o, BinaryWriter writer)
+        {
+            Formatter.Serialize(writer.BaseStream, o);
+        }
+
         public static object DeserializeFromStream(Stream stream)
         {
-            stream.Seek(0, SeekOrigin.Begin);
             object o = Formatter.Deserialize(stream);
             return o;
         }
 
         public static object DeserializeFromStream(BinaryReader reader)
         {
-            var stream = reader.BaseStream;
-            stream.Seek(0, SeekOrigin.Begin);
-            object o = Formatter.Deserialize(stream);
+            object o = Formatter.Deserialize(reader.BaseStream);
             return o;
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index a448d08..9a6884c 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -162,7 +162,12 @@ namespace Lucene.Net.Facet.Taxonomy
                 return 0;
             }
 
-            return Arrays.GetHashCode(Components);
+            int hash = Length;
+            for (int i = 0; i < Length; i++)
+            {
+                hash = hash * 31 + Components[i].GetHashCode();
+            }
+            return hash;
         }
 
         /// <summary>
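
The change above replaces Arrays.GetHashCode(Components) with an explicit polynomial hash. As a minimal standalone sketch of that pattern (the demo class and Main method below are illustrative only, not part of the commit): seeding with the component count and folding each component in with a 31 multiplier makes both the content and the order of the components contribute to the result.

    using System;

    static class PolyHashDemo
    {
        // Same shape as the new FacetLabel.GetHashCode() above: seed with the
        // element count, then fold each component in with a 31 multiplier.
        static int Hash(params string[] components)
        {
            int hash = components.Length;
            foreach (string c in components)
            {
                hash = hash * 31 + c.GetHashCode();
            }
            return hash;
        }

        static void Main()
        {
            // Prints False in practice: order now matters to the hash.
            Console.WriteLine(Hash("a", "b") == Hash("b", "a"));
        }
    }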

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
index 24b0fa2..eb63f56 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
@@ -1,6 +1,7 @@
-\ufeffnamespace Lucene.Net.Facet.Taxonomy.WriterCache
-{
+\ufeffusing System;
 
+namespace Lucene.Net.Facet.Taxonomy.WriterCache
+{
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -85,7 +86,7 @@
                     return false;
                 }
 
-                if (!cp.Components[i].Equals(charBlockArray.SubSequence(offset, offset + len)))
+                if (!cp.Components[i].Equals(charBlockArray.SubSequence(offset, offset + len), StringComparison.Ordinal))
                 {
                     return false;
                 }
@@ -93,7 +94,5 @@
             }
             return true;
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index a38329b..c1c9825 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -163,7 +163,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 {
                     toCopy = remainingInBlock;
                 }
-                s.CopyTo(offset, this.current.chars, this.current.length, offset + toCopy - offset);
+                s.CopyTo(offset, this.current.chars, this.current.length, toCopy);
                 offset += toCopy;
                 remain -= toCopy;
                 this.current.length += toCopy;
@@ -187,7 +187,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             return b.chars[IndexInBlock(index)];
         }
 
-        public ICharSequence SubSequence(int start, int end)
+        public string SubSequence(int start, int end)
         {
             int remaining = end - start;
             StringBuilder sb = new StringBuilder(remaining);
@@ -201,11 +201,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 remaining -= numToAppend;
                 indexInBlock = 0; // 2nd+ iterations read from start of the block
             }
-            return new StringCharSequenceWrapper(sb.ToString());
+            return sb.ToString();
         }
 
-
-
+        ICharSequence ICharSequence.SubSequence(int start, int end)
+        {
+            return new StringCharSequenceWrapper(this.SubSequence(start, end));
+        }
 
         public override string ToString()
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index f717fb1..d0cebe2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -1,11 +1,9 @@
 \ufeffusing System;
-using System.Collections.Generic;
 using System.IO;
-using Lucene.Net.Store;
+using System.Runtime.Serialization;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,7 +21,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      * limitations under the License.
      */
 
-
     /// <summary>
     /// This is a very efficient LabelToOrdinal implementation that uses a
     /// CharBlockArray to store all labels and a configurable number of HashArrays to
@@ -396,7 +393,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// Opens the file and reloads the CompactLabelToOrdinal. The file it expects
         /// is generated from the <seealso cref="#flush(File)"/> command.
         /// </summary>
-        internal static CompactLabelToOrdinal Open(string file, float loadFactor, int numHashArrays)
+        internal static CompactLabelToOrdinal Open(FileInfo file, float loadFactor, int numHashArrays)
         {
             /// <summary>
             /// Part of the file is the labelRepository, which needs to be rehashed
@@ -411,7 +408,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             BinaryReader dis = null;
             try
             {
-                dis = new BinaryReader(new FileStream(file,FileMode.Open,FileAccess.Read));
+                dis = new BinaryReader(new FileStream(file.FullName, FileMode.Open, FileAccess.Read));
 
                 // TaxiReader needs to load the "counter" or occupancy (L2O) to know
                 // the next unique facet. we used to load the delimiter too, but
@@ -465,9 +462,9 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 }
 
             }
-            catch (DllNotFoundException)
+            catch (SerializationException se)
             {
-                throw new IOException("Invalid file format. Cannot deserialize.");
+                throw new IOException("Invalid file format. Cannot deserialize.", se);
             }
             finally
             {
@@ -482,7 +479,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         }
 
-        internal virtual void Flush(FileStream stream)
+        internal virtual void Flush(Stream stream)
         {
             using (BinaryWriter dos = new BinaryWriter(stream))
             {
@@ -508,5 +505,4 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
index f059a79..a763f80 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
@@ -76,13 +76,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
             DirectoryInfo tempDir = CreateTempDir("growingchararray");
             FileInfo f = new FileInfo(Path.Combine(tempDir.FullName, "GrowingCharArrayTest.tmp"));
-            using (Stream @out = new FileStream(f.FullName, FileMode.OpenOrCreate, FileAccess.Write))
+            using (var @out = new FileStream(f.FullName, FileMode.OpenOrCreate, FileAccess.Write))
             {
                 array.Flush(@out);
                 @out.Flush();
             }
 
-            using (Stream @in = new FileStream(f.FullName, FileMode.Open, FileAccess.Read))
+            using (var @in = new FileStream(f.FullName, FileMode.Open, FileAccess.Read))
             {
                 array = CharBlockArray.Open(@in);
                 AssertEqualsInternal("GrowingCharArray<->StringBuilder mismatch after flush/load.", builder, array);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/08dfc1bd/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
index 549bf09..eafdd62 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
@@ -1,14 +1,12 @@
-\ufeffusing System;
+\ufeffusing NUnit.Framework;
+using System;
 using System.Collections.Generic;
-using Lucene.Net.Support;
-using NUnit.Framework;
+using System.IO;
+using System.Text;
+using System.Text.RegularExpressions;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-
-    using TestUtil = Lucene.Net.Util.TestUtil;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -25,11 +23,11 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
+
     [TestFixture]
     public class TestCompactLabelToOrdinal : FacetTestCase
     {
-        /* not finished to porting yet because of missing decoder implementation */
-        /*
+        [Test]
         public virtual void TestL2O()
         {
             LabelToOrdinal map = new LabelToOrdinalMap();
@@ -43,18 +41,17 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             byte[] buffer = new byte[50];
 
             Random random = Random();
-            for (int i = 0; i < numUniqueValues; )
+            for (int i = 0; i < numUniqueValues;)
             {
                 random.NextBytes(buffer);
                 int size = 1 + random.Next(buffer.Length);
 
                 // This test is turning random bytes into a string,
                 // this is asking for trouble.
-                CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onUnmappableCharacter(CodingErrorAction.REPLACE).onMalformedInput(CodingErrorAction.REPLACE);
-                uniqueValues[i] = decoder.decode(ByteBuffer.Wrap(buffer, 0, size)).ToString();
+                uniqueValues[i] = Encoding.UTF8.GetString(buffer, 0, size);
                 // we cannot have empty path components, so eliminate all prefix as well
                 // as middle consecutive delimiter chars.
-                uniqueValues[i] = uniqueValues[i].replaceAll("/+", "/");
+                uniqueValues[i] = Regex.Replace(uniqueValues[i], "/+", "/");
                 if (uniqueValues[i].StartsWith("/", StringComparison.Ordinal))
                 {
                     uniqueValues[i] = uniqueValues[i].Substring(1);
@@ -66,16 +63,21 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
 
             var tmpDir = CreateTempDir("testLableToOrdinal");
-            var f = new File(tmpDir, "CompactLabelToOrdinalTest.tmp");
+            var f = new FileInfo(Path.Combine(tmpDir.FullName, "CompactLabelToOrdinalTest.tmp"));
             int flushInterval = 10;
 
             for (int i = 0; i < n; i++)
             {
                 if (i > 0 && i % flushInterval == 0)
                 {
-                    compact.Flush(f);
-                    compact = CompactLabelToOrdinal.open(f, 0.15f, 3);
-                    Assert.True(f.delete());
+                    using (var fileStream = new FileStream(f.FullName, FileMode.OpenOrCreate, FileAccess.ReadWrite))
+                    {
+                        compact.Flush(fileStream);
+                    }
+                    compact = CompactLabelToOrdinal.Open(f, 0.15f, 3);
+                    //assertTrue(f.Delete());
+                    f.Delete();
+                    assertFalse(File.Exists(f.FullName));
                     if (flushInterval < (n / 10))
                     {
                         flushInterval *= 10;
@@ -97,7 +99,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 int ord1 = map.GetOrdinal(label);
                 int ord2 = compact.GetOrdinal(label);
 
-                Assert.AreEqual(ord1, ord2);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("Testing label: " + label.ToString());
+                }
+
+                assertEquals(ord1, ord2);
 
                 if (ord1 == LabelToOrdinal.INVALID_ORDINAL)
                 {
@@ -121,13 +128,136 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 }
                 int ord1 = map.GetOrdinal(label);
                 int ord2 = compact.GetOrdinal(label);
-                Assert.AreEqual(ord1, ord2);
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("Testing label 2: " + label.ToString());
+                }
+
+                assertEquals(ord1, ord2);
+            }
+        }
+
+        /// <summary>
+        /// LUCENENET specific test similar to TestL2O without any randomness, useful for debugging
+        /// </summary>
+        [Test]
+        public virtual void TestL2OBasic()
+        {
+            LabelToOrdinal map = new LabelToOrdinalMap();
+
+            CompactLabelToOrdinal compact = new CompactLabelToOrdinal(200, 0.15f, 3);
+
+            int n = 50;
+
+            string[] uniqueValues = new string[]
+            {
+                @"\ufffd",
+                @"\ufffdr\ufffdG\ufffd\ufffdF\ufffd\u0382\ufffd7\u0019\ufffdh\ufffd\u0015\ufffd\ufffd\ufffd#\u001d3\r{\ufffd\ufffdq\ufffd_\ufffd\ufffd\ufffd\u0502\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd",
+                "foo bar one",
+                new string(new char[] { (char)65533, (char)65533, (char)65, (char)65533, (char)45, (char)106, (char)40, (char)643, (char)65533, (char)11, (char)65533, (char)88, (char)65533, (char)78, (char)126, (char)56, (char)12, (char)71 }),
+                "foo bar two",
+                "foo bar three",
+                "foo bar four",
+                "foo bar five",
+                "foo bar six",
+                "foo bar seven",
+                "foo bar eight",
+                "foo bar nine",
+                "foo bar ten",
+                "foo/bar/one",
+                "foo/bar/two",
+                "foo/bar/three",
+                "foo/bar/four",
+                "foo/bar/five",
+                "foo/bar/six",
+                "foo/bar/seven",
+                "foo/bar/eight",
+                "foo/bar/nine",
+                "foo/bar/ten",
+                ""
+            };
+
+            var tmpDir = CreateTempDir("testLableToOrdinal");
+            var f = new FileInfo(Path.Combine(tmpDir.FullName, "CompactLabelToOrdinalTest.tmp"));
+            int flushInterval = 10;
+
+            for (int i = 0; i < n; i++)
+            {
+                if (i > 0 && i % flushInterval == 0)
+                {
+                    using (var fileStream = new FileStream(f.FullName, FileMode.OpenOrCreate, FileAccess.ReadWrite))
+                    {
+                        compact.Flush(fileStream);
+                    }
+                    compact = CompactLabelToOrdinal.Open(f, 0.15f, 3);
+                    //assertTrue(f.Delete());
+                    f.Delete();
+                    assertFalse(File.Exists(f.FullName));
+                    if (flushInterval < (n / 10))
+                    {
+                        flushInterval *= 10;
+                    }
+                }
+
+                FacetLabel label = new FacetLabel();
+                foreach (string s in uniqueValues)
+                {
+                    if (s.Length == 0)
+                    {
+                        label = new FacetLabel();
+                    }
+                    else
+                    {
+                        label = new FacetLabel(s.Split("/".ToCharArray(), StringSplitOptions.RemoveEmptyEntries));
+                    }
+
+                    int ord1 = map.GetOrdinal(label);
+                    int ord2 = compact.GetOrdinal(label);
+
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("Testing label: " + label.ToString());
+                    }
+
+                    assertEquals(ord1, ord2);
+
+                    if (ord1 == LabelToOrdinal.INVALID_ORDINAL)
+                    {
+                        ord1 = compact.NextOrdinal;
+                        map.AddLabel(label, ord1);
+                        compact.AddLabel(label, ord1);
+                    }
+                }
+            }
+
+            for (int i = 0; i < uniqueValues.Length; i++)
+            {
+                FacetLabel label;
+                string s = uniqueValues[i];
+                if (s.Length == 0)
+                {
+                    label = new FacetLabel();
+                }
+                else
+                {
+                    label = new FacetLabel(s.Split("/".ToCharArray(), StringSplitOptions.RemoveEmptyEntries));
+                }
+                int ord1 = map.GetOrdinal(label);
+                int ord2 = compact.GetOrdinal(label);
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("Testing label 2: " + label.ToString());
+                }
+
+                assertEquals(ord1, ord2);
             }
         }
 
         private class LabelToOrdinalMap : LabelToOrdinal
         {
-            internal IDictionary<FacetLabel, int?> map = new Dictionary<FacetLabel, int?>();
+            internal IDictionary<FacetLabel, int> map = new Dictionary<FacetLabel, int>();
 
             internal LabelToOrdinalMap()
             {
@@ -140,12 +270,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
             public override int GetOrdinal(FacetLabel label)
             {
-                int? value = map[label];
-                return (value != null) ? (int)value : LabelToOrdinal.INVALID_ORDINAL;
+                int value;
+                if (map.TryGetValue(label, out value))
+                {
+                    return value;
+                }
+                return LabelToOrdinal.INVALID_ORDINAL;
             }
-
-        } */
-
+        } 
     }
-
 }
\ No newline at end of file


[10/46] lucenenet git commit: Fixed bug in Facet.Taxonomy.TestTaxonomyFacetCounts.TestBasic() where the output wasn't being written to because of a missing StreamWriter.

Posted by sy...@apache.org.
Fixed bug in Facet.Taxonomy.TestTaxonomyFacetCounts.TestBasic() where the output wasn't being written to because of a missing StreamWriter.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/2b718fb4
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/2b718fb4
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/2b718fb4

Branch: refs/heads/master
Commit: 2b718fb47402f644b304c2e1c6ce6e617d08327d
Parents: e9302a8
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 22:42:56 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:54 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/TestTaxonomyFacetCounts.cs               | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2b718fb4/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index e4ddc51..51d0794 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -50,6 +50,8 @@ namespace Lucene.Net.Facet.Taxonomy
     using Directory = Lucene.Net.Store.Directory;
     using IOUtils = Lucene.Net.Util.IOUtils;
     using TestUtil = Lucene.Net.Util.TestUtil;
+    using System.Text;
+
     [TestFixture]
     public class TestTaxonomyFacetCounts : FacetTestCase
     {
@@ -128,9 +130,15 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.Null(facets.GetTopChildren(10, "Non exitent dim"));
 
             // Smoke test PrintTaxonomyStats:
-            ByteArrayOutputStream bos = new ByteArrayOutputStream();
-            PrintTaxonomyStats.PrintStats(taxoReader, Console.Out, true);
-            string result = bos.ToString();
+            string result;
+            using (ByteArrayOutputStream bos = new ByteArrayOutputStream())
+            {
+                using (StreamWriter w = new StreamWriter(bos, Encoding.UTF8, 2048, true) { AutoFlush = true })
+                {
+                    PrintTaxonomyStats.PrintStats(taxoReader, w, true);
+                }
+                result = bos.ToString();
+            }
             Assert.True(result.IndexOf("/Author: 4 immediate children; 5 total categories", StringComparison.Ordinal) != -1);
             Assert.True(result.IndexOf("/Publish Date: 3 immediate children; 12 total categories", StringComparison.Ordinal) != -1);
             // Make sure at least a few nodes of the tree came out:


[24/46] lucenenet git commit: Facets: Return List<T> rather than IList<T> to prevent having to do too many O(n) operations on the results.

Posted by sy...@apache.org.
Facets: Return List<T> rather than IList<T> to prevent having to do too many O(n) operations on the results.
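
The motivation is that the concrete List<T> can be sorted and indexed in place, while an IList<T> return type often forces callers into an extra O(n) copy first. A minimal, self-contained illustration of that difference (the types and values below are hypothetical, not taken from the Facets API):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class ListVsIList
    {
        // Returning the concrete type lets callers call List<T>.Sort directly.
        static List<int> AsList() { return new List<int> { 3, 1, 2 }; }

        // Returning the interface hides the in-place Sort, so callers
        // typically copy to an array or list (an extra O(n) pass) to sort.
        static IList<int> AsIList() { return new List<int> { 3, 1, 2 }; }

        static void Main()
        {
            List<int> a = AsList();
            a.Sort();                      // in place, no copy

            int[] b = AsIList().ToArray(); // extra O(n) copy just to sort
            Array.Sort(b);

            Console.WriteLine(string.Join(",", a) + " / " + string.Join(",", b));
        }
    }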


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/f50f913d
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/f50f913d
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/f50f913d

Branch: refs/heads/master
Commit: f50f913d565ff8d1e90285c8a8af239dbae313f2
Parents: 9912999
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:10:27 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:29 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Facets.cs                                 | 2 +-
 src/Lucene.Net.Facet/MultiFacets.cs                            | 4 ++--
 src/Lucene.Net.Facet/Range/RangeFacetCounts.cs                 | 4 ++--
 .../SortedSet/SortedSetDocValuesFacetCounts.cs                 | 6 +++---
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs                | 6 +++---
 5 files changed, 11 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f50f913d/src/Lucene.Net.Facet/Facets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Facets.cs b/src/Lucene.Net.Facet/Facets.cs
index 405d1b3..ce17c15 100644
--- a/src/Lucene.Net.Facet/Facets.cs
+++ b/src/Lucene.Net.Facet/Facets.cs
@@ -53,6 +53,6 @@ namespace Lucene.Net.Facet
         ///  different dimensions were indexed, for example
         ///  depending on the type of document. 
         /// </summary>
-        public abstract IList<FacetResult> GetAllDims(int topN);
+        public abstract List<FacetResult> GetAllDims(int topN);
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f50f913d/src/Lucene.Net.Facet/MultiFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/MultiFacets.cs b/src/Lucene.Net.Facet/MultiFacets.cs
index 051de44..79bb077 100644
--- a/src/Lucene.Net.Facet/MultiFacets.cs
+++ b/src/Lucene.Net.Facet/MultiFacets.cs
@@ -67,10 +67,10 @@ namespace Lucene.Net.Facet
             return facets.GetSpecificValue(dim, path);
         }
 
-        public override IList<FacetResult> GetAllDims(int topN)
+        public override List<FacetResult> GetAllDims(int topN)
         {
 
-            IList<FacetResult> results = new List<FacetResult>();
+            List<FacetResult> results = new List<FacetResult>();
 
             // First add the specific dim's facets:
             foreach (KeyValuePair<string, Facets> ent in dimToFacets)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f50f913d/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
index e051712..943f4e1 100644
--- a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
@@ -86,9 +86,9 @@ namespace Lucene.Net.Facet.Range
             throw new System.NotSupportedException();
         }
 
-        public override IList<FacetResult> GetAllDims(int topN)
+        public override List<FacetResult> GetAllDims(int topN)
         {
-            return new[] { GetTopChildren(topN, null) };
+            return new List<FacetResult> { GetTopChildren(topN, null) };
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f50f913d/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 95ee9d1..b32d430 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -291,7 +291,7 @@ namespace Lucene.Net.Facet.SortedSet
             return counts[ord];
         }
 
-        public override IList<FacetResult> GetAllDims(int topN)
+        public override List<FacetResult> GetAllDims(int topN)
         {
             IList<FacetResult> results = new List<FacetResult>();
             foreach (KeyValuePair<string, OrdRange> ent in state.PrefixToOrdRange)
@@ -303,9 +303,9 @@ namespace Lucene.Net.Facet.SortedSet
                 }
             }
 
-            var resultArray = results.ToArray();
+            var resultArray = results.ToList();
             // Sort by highest count:
-            Array.Sort(resultArray, new ComparatorAnonymousInnerClassHelper(this));
+            resultArray.Sort(new ComparatorAnonymousInnerClassHelper(this));
             return resultArray;
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f50f913d/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index 1a53994..51a073f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -105,7 +105,7 @@ namespace Lucene.Net.Facet.Taxonomy
             return dimConfig;
         }
 
-        public override IList<FacetResult> GetAllDims(int topN)
+        public override List<FacetResult> GetAllDims(int topN)
         {
             int ord = children[TaxonomyReader.ROOT_ORDINAL];
             IList<FacetResult> results = new List<FacetResult>();
@@ -125,8 +125,8 @@ namespace Lucene.Net.Facet.Taxonomy
             }
 
             // Sort by highest value, tie break by dim:
-            var resultArray = results.ToArray();
-            Array.Sort(resultArray, BY_VALUE_THEN_DIM);
+            var resultArray = results.ToList();
+            resultArray.Sort(BY_VALUE_THEN_DIM);
             return resultArray;
         }
     }


[30/46] lucenenet git commit: Changed Facet.Taxonomy.WriterCache.CompactLabelsToOrdinal.SizeOfMap to a property

Posted by sy...@apache.org.
Changed Facet.Taxonomy.WriterCache.CompactLabelsToOrdinal.SizeOfMap to a property


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/c083a05e
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/c083a05e
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/c083a05e

Branch: refs/heads/master
Commit: c083a05ec594c660e8e98698cc97210f510d1930
Parents: e0d070b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:47:36 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:46 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/WriterCache/CompactLabelToOrdinal.cs             | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c083a05e/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 2ba69a5..35d3ee5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -67,9 +67,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         /// <summary>
         /// How many labels. </summary>
-        public virtual int SizeOfMap()
+        public virtual int SizeOfMap
         {
-            return this.collisionMap.Count;
+            get
+            {
+                return this.collisionMap.Count;
+            }
         }
 
         private CompactLabelToOrdinal()


[44/46] lucenenet git commit: Added CSharpTest.Net.Collections.LurchTable to our Support namespace and modified the DirectoryTaxonomyReader and NameIntCacheLRU to utilize it.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
index b7264a0..6b849f8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
@@ -1,4 +1,5 @@
-\ufeffusing System.Collections.Generic;
+\ufeffusing Lucene.Net.Support;
+using System.Collections.Generic;
 using System.Linq;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
@@ -32,7 +33,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// </remarks>
     public class NameIntCacheLRU
     {
-        private Dictionary<object, int?> cache;
+        private IDictionary<object, int> cache;
         internal long nMisses = 0; // for debug
         internal long nHits = 0; // for debug
         private int capacity;
@@ -67,31 +68,36 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         private void CreateCache(int maxSize)
         {
-            // LUCENENET TODO: Create an adapter so we can plug in either a generic
-            // dictionary or LRUHashMap or alternatively make LRUHashMap implement IDictionary<TKey, TValue>
-            //if (maxSize < int.MaxValue)
-            //{
-            //    cache = new LRUHashMap<object,int?>(1000,true); //for LRU
-            //}
-            //else
+            if (maxSize < int.MaxValue)
             {
-                cache = new Dictionary<object, int?>(1000); //no need for LRU
+                cache = new LurchTable<object, int>(1000, LurchTableOrder.Access); //for LRU
+            }
+            else
+            {
+                cache = new Dictionary<object, int>(1000); //no need for LRU
             }
         }
 
-        internal virtual int? Get(FacetLabel name)
+        internal virtual int Get(FacetLabel name)
+        {
+            int result;
+            TryGetValue(name, out result);
+            return result;
+        }
+
+        internal virtual bool TryGetValue(FacetLabel name, out int value)
         {
             object key = Key(name);
-            int? res = cache.ContainsKey(key) ? cache[key] : null;
-            if (res == null)
+            if (!cache.TryGetValue(key, out value))
             {
                 nMisses++;
+                return false;
             }
             else
             {
                 nHits++;
+                return true;
             }
-            return res;
         }
 
         /// <summary>
@@ -111,13 +117,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// Add a new value to cache.
         /// Return true if cache became full and some room need to be made. 
         /// </summary>
-        internal virtual bool Put(FacetLabel name, int? val)
+        internal virtual bool Put(FacetLabel name, int val)
         {
             cache[Key(name)] = val;
             return CacheFull;
         }
 
-        internal virtual bool Put(FacetLabel name, int prefixLen, int? val)
+        internal virtual bool Put(FacetLabel name, int prefixLen, int val)
         {
             cache[Key(name, prefixLen)] = val;
             return CacheFull;
@@ -160,12 +166,24 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 return false;
             }
 
-            // LUCENENET: Loop in reverse so we can safely delete
-            // a range of items (0 - n) without a 
-            // "Collection was modified" conflict
-            for (int i = n - 1; i >= 0; i--)
+            lock (this)
             {
-                cache.Remove(cache.Keys.ElementAt(i));
+                // Double-check that another thread didn't beat us to the operation
+                n = cache.Count - (2 * capacity) / 3;
+                if (n <= 0)
+                {
+                    return false;
+                }
+
+                //System.Diagnostics.Debug.WriteLine("Removing cache entries in MakeRoomLRU");
+
+                // LUCENENET: Loop in reverse so we can safely delete
+                // a range of items (0 - n) without a 
+                // "Collection was modified" conflict
+                for (int i = n - 1; i >= 0; i--)
+                {
+                    cache.Remove(cache.Keys.ElementAt(i));
+                }
             }
             return true;
         }
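
A rough sketch of the pattern CreateCache now relies on (the class, key type, and helper methods below are hypothetical and only mirror the constructor call shown above; exact trimming/eviction behavior depends on how LurchTable is configured):

    using Lucene.Net.Support;
    using System.Collections.Generic;

    internal class NameCacheSketch
    {
        // Access-ordered LurchTable standing in for the previous Dictionary,
        // exposed through the same IDictionary<TKey, TValue> surface.
        private readonly IDictionary<string, int> cache =
            new LurchTable<string, int>(1000, LurchTableOrder.Access);

        public bool TryGet(string name, out int ordinal)
        {
            return cache.TryGetValue(name, out ordinal);
        }

        public void Put(string name, int ordinal)
        {
            cache[name] = ordinal; // IDictionary indexer, as NameIntCacheLRU.Put uses
        }
    }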

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
index 1f7afb1..e19a900 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
@@ -59,6 +59,12 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.Null(lru.Get("five"));
             Assert.NotNull(lru.Get("six"));
             Assert.NotNull(lru.Get("seven"));
+
+            // LUCENENET specific tests to ensure Put is implemented correctly
+            Assert.Null(lru.Put("ten", "oops"));
+            assertEquals("oops", lru.Put("ten", "not oops"));
+            assertEquals("not oops", lru.Put("ten", "new value"));
+            assertEquals("new value", lru.Put("ten", "new value2"));
         }
     }
 


[35/46] lucenenet git commit: Renamed Facet.SortedSet.SortedSetDocValuesReaderState.Size to be Count (.NETified)

Posted by sy...@apache.org.
Renamed Facet.SortedSet.SortedSetDocValuesReaderState.Size to be Count (.NETified)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bcb5d136
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bcb5d136
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bcb5d136

Branch: refs/heads/master
Commit: bcb5d136c22fcd5eb81c184598aab4f0915e2fbb
Parents: 4319c5d
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 17:24:07 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:58 2016 +0700

----------------------------------------------------------------------
 .../SortedSet/DefaultSortedSetDocValuesReaderState.cs              | 2 +-
 src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs    | 2 +-
 src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs    | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcb5d136/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index a5806c3..9ecca3f 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -143,7 +143,7 @@ namespace Lucene.Net.Facet.SortedSet
 
         /// <summary>
         /// Number of unique labels. </summary>
-        public override int Size
+        public override int Count
         {
             get
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcb5d136/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index c1a1da9..e552d83 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -67,7 +67,7 @@ namespace Lucene.Net.Facet.SortedSet
             this.state = state;
             this.field = state.Field;
             dv = state.DocValues;
-            counts = new int[state.Size];
+            counts = new int[state.Count];
             //System.out.println("field=" + field);
             Count(hits.GetMatchingDocs());
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcb5d136/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
index f895210..636d434 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
@@ -93,6 +93,6 @@ namespace Lucene.Net.Facet.SortedSet
 
         /// <summary>
         /// Number of unique labels. </summary>
-        public abstract int Size { get; }
+        public abstract int Count { get; }
     }
 }
\ No newline at end of file


[11/46] lucenenet git commit: Fixed assert bug in Facet.TestDrillSideways.VerifyEquals() that caused random failures.

Posted by sy...@apache.org.
Fixed assert bug in Facet.TestDrillSideways.VerifyEquals() that caused random failures.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/de9e8cec
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/de9e8cec
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/de9e8cec

Branch: refs/heads/master
Commit: de9e8cec065d4fdae1169f39f4f4336100345f69
Parents: 2b718fb
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 23:52:23 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:56 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Tests.Facet/TestDrillSideways.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/de9e8cec/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index 366a16a..719e212 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -1259,7 +1259,7 @@ namespace Lucene.Net.Facet
                         {
                             // Tie-break facet labels are only in unicode
                             // order with SortedSetDVFacets:
-                            Assert.AreEqual("value @ idx=" + i, dimValues[dim][expectedOrd], fr.LabelValues[i].label);
+                            assertEquals("value @ idx=" + i, dimValues[dim][expectedOrd], fr.LabelValues[i].label);
                         }
                     }
                 }


[02/46] lucenenet git commit: Fixed bug in Facet.Taxonomy.SearcherTaxonomyManager - wrong exception being thrown.

Posted by sy...@apache.org.
Fixed bug in Facet.Taxonomy.SearcherTaxonomyManager - wrong exception being thrown.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bcbfad58
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bcbfad58
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bcbfad58

Branch: refs/heads/master
Commit: bcbfad58f7c6b3c3c26f20d72e87e4a06c798e37
Parents: 1ca08df
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Fri Sep 23 22:32:08 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:33 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bcbfad58/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index d543aad..7abc055 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -32,6 +32,7 @@ namespace Lucene.Net.Facet.Taxonomy
     using SearcherManager = Lucene.Net.Search.SearcherManager;
     using Directory = Lucene.Net.Store.Directory;
     using IOUtils = Lucene.Net.Util.IOUtils;
+    using System;
 
     /// <summary>
     /// Manages near-real-time reopen of both an IndexSearcher
@@ -163,7 +164,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 else if (taxoWriter != null && taxoWriter.TaxonomyEpoch != taxoEpoch)
                 {
                     IOUtils.Close(newReader, tr);
-                    throw new ThreadStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
+                    throw new InvalidOperationException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
                 }
 
                 return new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, newReader), tr);


[12/46] lucenenet git commit: Fixed cache cleanup bug in Facet.Taxonomy.WriterCache.NameIntCacheLRU.

Posted by sy...@apache.org.
Fixed cache cleanup bug in Facet.Taxonomy.WriterCache.NameIntCacheLRU.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/c40662a2
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/c40662a2
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/c40662a2

Branch: refs/heads/master
Commit: c40662a239a4debbbbfd3e1756386e10b302f499
Parents: de9e8ce
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 23:53:20 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:58 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/WriterCache/NameIntCacheLRU.cs            | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c40662a2/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
index ce14f49..a761aea 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
@@ -1,4 +1,5 @@
 \ufeffusing System.Collections.Generic;
+using System.Linq;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
@@ -152,13 +153,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             {
                 return false;
             }
-            IEnumerator<object> it = cache.Keys.GetEnumerator();
-            int i = 0;
-            
-            while (i < n && it.MoveNext())
+
+            // LUCENENET: Loop in reverse so we can safely delete
+            // a range of items (0 - n) without a 
+            // "Collection was modified" conflict
+            for (int i = n - 1; i >= 0; i--)
             {
-                cache.Remove(it.Current);
-                i++;
+                cache.Remove(cache.Keys.ElementAt(i));
             }
             return true;
         }
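
The reversed index loop avoids keeping a live enumerator over cache.Keys while entries are removed. A small self-contained illustration of the difference (plain Dictionary with hypothetical values; the behavior described is that of the .NET Framework targeted at the time):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class TrimDemo
    {
        static void Main()
        {
            var cache = new Dictionary<string, int>
            {
                { "a", 1 }, { "b", 2 }, { "c", 3 }, { "d", 4 }
            };
            int n = 2; // entries to evict

            // Throws "Collection was modified" on the .NET Framework, because
            // the foreach enumerator over cache.Keys is still live during Remove:
            // foreach (var key in cache.Keys.Take(n)) cache.Remove(key);

            // The commit's pattern: each ElementAt call enumerates afresh and
            // finishes before Remove runs, so no enumerator is invalidated.
            for (int i = n - 1; i >= 0; i--)
            {
                cache.Remove(cache.Keys.ElementAt(i));
            }

            Console.WriteLine(cache.Count); // 2
        }
    }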


[26/46] lucenenet git commit: Changed Facet.Taxonomy.ParallelTaxonomyArrays methods Parents, Children, and Siblings to be properties.

Posted by sy...@apache.org.
Changed Facet.Taxonomy.ParallelTaxonomyArrays methods Parents, Children, and Siblings to be properties.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/67b29ee3
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/67b29ee3
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/67b29ee3

Branch: refs/heads/master
Commit: 67b29ee32249c88628db4a8a4ddd1e69cc6a9694
Parents: 0bc3130
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:31:19 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:34 2016 +0700

----------------------------------------------------------------------
 .../Directory/DirectoryTaxonomyWriter.cs        |  2 +-
 .../Taxonomy/Directory/TaxonomyIndexArrays.cs   | 43 ++++++++++-------
 .../Taxonomy/ParallelTaxonomyArrays.cs          |  6 +--
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs |  4 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  4 +-
 .../Directory/TestConcurrentFacetedIndexing.cs  |  2 +-
 .../Directory/TestDirectoryTaxonomyReader.cs    | 10 ++--
 .../Directory/TestDirectoryTaxonomyWriter.cs    |  2 +-
 .../Taxonomy/TestTaxonomyCombined.cs            | 50 ++++++++++----------
 9 files changed, 66 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index d25cd1b..b07940e 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -886,7 +886,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 throw new System.IndexOutOfRangeException("requested ordinal is bigger than the largest ordinal in the taxonomy");
             }
 
-            int[] parents = TaxoArrays.Parents();
+            int[] parents = TaxoArrays.Parents;
             Debug.Assert(ordinal < parents.Length, "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !");
             return parents[ordinal];
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
index ec0b33a..9cbaaed 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
@@ -76,7 +76,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             // it may be caused if e.g. the taxonomy segments were merged, and so an updated
             // NRT reader was obtained, even though nothing was changed. this is not very likely
             // to happen.
-            int[] copyParents = copyFrom.Parents();
+            int[] copyParents = copyFrom.Parents;
             this.parents = new int[reader.MaxDoc];
             Array.Copy(copyParents, 0, parents, 0, copyParents.Length);
             InitParents(reader, copyParents.Length);
@@ -98,8 +98,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                     if (copyFrom != null)
                     {
                         // called from the ctor, after we know copyFrom has initialized children/siblings
-                        Array.Copy(copyFrom.Children(), 0, children, 0, copyFrom.Children().Length);
-                        Array.Copy(copyFrom.Siblings(), 0, siblings, 0, copyFrom.Siblings().Length);
+                        Array.Copy(copyFrom.Children, 0, children, 0, copyFrom.Children.Length);
+                        Array.Copy(copyFrom.Siblings, 0, siblings, 0, copyFrom.Siblings.Length);
                         ComputeChildrenSiblings(copyFrom.parents.Length);
                     }
                     else
@@ -207,9 +207,12 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// Returns the parents array, where {@code parents[i]} denotes the parent of
         /// category ordinal {@code i}.
         /// </summary>
-        public override int[] Parents()
+        public override int[] Parents
         {
-            return parents;
+            get
+            {
+                return parents;
+            }
         }
 
         /// <summary>
@@ -218,15 +221,18 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// category that was added last to the taxonomy as an immediate child of
         /// {@code i}.
         /// </summary>
-        public override int[] Children()
+        public override int[] Children
         {
-            if (!initializedChildren)
+            get
             {
-                InitChildrenSiblings(null);
-            }
+                if (!initializedChildren)
+                {
+                    InitChildrenSiblings(null);
+                }
 
-            // the array is guaranteed to be populated
-            return children;
+                // the array is guaranteed to be populated
+                return children;
+            }
         }
 
         /// <summary>
@@ -234,15 +240,18 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// of category ordinal {@code i}. The sibling is defined as the previous
         /// youngest child of {@code parents[i]}.
         /// </summary>
-        public override int[] Siblings()
+        public override int[] Siblings
         {
-            if (!initializedChildren)
+            get
             {
-                InitChildrenSiblings(null);
-            }
+                if (!initializedChildren)
+                {
+                    InitChildrenSiblings(null);
+                }
 
-            // the array is guaranteed to be populated
-            return siblings;
+                // the array is guaranteed to be populated
+                return siblings;
+            }
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
index 60e351e..1b51568 100644
--- a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
@@ -52,18 +52,18 @@
         /// Returns the parents array, where {@code parents[i]} denotes the parent of
         /// category ordinal {@code i}.
         /// </summary>
-        public abstract int[] Parents();
+        public abstract int[] Parents { get; }
 
         /// <summary>
         /// Returns the children array, where {@code children[i]} denotes a child of
         /// category ordinal {@code i}.
         /// </summary>
-        public abstract int[] Children();
+        public abstract int[] Children { get; }
 
         /// <summary>
         /// Returns the siblings array, where {@code siblings[i]} denotes the sibling
         /// of category ordinal {@code i}.
         /// </summary>
-        public abstract int[] Siblings();
+        public abstract int[] Siblings { get; }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index 51a073f..ef8dede 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -86,8 +86,8 @@ namespace Lucene.Net.Facet.Taxonomy
             this.taxoReader = taxoReader;
             this.config = config;
             ParallelTaxonomyArrays pta = taxoReader.ParallelTaxonomyArrays;
-            children = pta.Children();
-            siblings = pta.Siblings();
+            children = pta.Children;
+            siblings = pta.Siblings;
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index 4dc391e..e99b011 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -221,8 +221,8 @@ namespace Lucene.Net.Facet.Taxonomy
         public virtual ChildrenIterator GetChildren(int ordinal)
         {
             ParallelTaxonomyArrays arrays = ParallelTaxonomyArrays;
-            int child = ordinal >= 0 ? arrays.Children()[ordinal] : INVALID_ORDINAL;
-            return new ChildrenIterator(child, arrays.Siblings());
+            int child = ordinal >= 0 ? arrays.Children[ordinal] : INVALID_ORDINAL;
+            return new ChildrenIterator(child, arrays.Siblings);
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index 8c36649..52a4d61 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -148,7 +148,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 }
                 Fail("mismatch number of categories");
             }
-            int[] parents = tr.ParallelTaxonomyArrays.Parents();
+            int[] parents = tr.ParallelTaxonomyArrays.Parents;
             foreach (string cat in values.Keys)
             {
                 FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(cat));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
index d0801dd..0e2935a 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
@@ -253,7 +253,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 // assert categories
                 Assert.AreEqual(numCategories, reader.Size);
                 int roundOrdinal = reader.GetOrdinal(new FacetLabel(Convert.ToString(i)));
-                int[] parents = reader.ParallelTaxonomyArrays.Parents();
+                int[] parents = reader.ParallelTaxonomyArrays.Parents;
                 Assert.AreEqual(0, parents[roundOrdinal]); // round's parent is root
                 for (int j = 0; j < numCats; j++)
                 {
@@ -306,7 +306,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             var reader = new DirectoryTaxonomyReader(writer);
             Assert.AreEqual(1, reader.Size);
-            Assert.AreEqual(1, reader.ParallelTaxonomyArrays.Parents().Length);
+            Assert.AreEqual(1, reader.ParallelTaxonomyArrays.Parents.Length);
 
             // add category and call forceMerge -- this should flush IW and merge segments down to 1
             // in ParentArray.initFromReader, this used to fail assuming there are no parents.
@@ -319,7 +319,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             reader.Dispose();
             reader = newtr;
             Assert.AreEqual(2, reader.Size);
-            Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents().Length);
+            Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents.Length);
 
             reader.Dispose();
             writer.Dispose();
@@ -367,7 +367,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             var reader = new DirectoryTaxonomyReader(writer);
             Assert.AreEqual(2, reader.Size);
-            Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents().Length);
+            Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents.Length);
 
             // merge all the segments so that NRT reader thinks there's a change 
             iw.ForceMerge(1);
@@ -378,7 +378,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             reader.Dispose();
             reader = newtr;
             Assert.AreEqual(2, reader.Size);
-            Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents().Length);
+            Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents.Length);
 
             reader.Dispose();
             writer.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index 8924f56..778a889 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -325,7 +325,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 Fail("mismatch number of categories");
             }
 
-            int[] parents = dtr.ParallelTaxonomyArrays.Parents();
+            int[] parents = dtr.ParallelTaxonomyArrays.Parents;
             foreach (string cat in values.Keys)
             {
                 FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(cat));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67b29ee3/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index 9290ec2..70080ec 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -312,7 +312,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
             Assert.AreEqual(1, tr.Size);
             Assert.AreEqual(0, tr.GetPath(0).Length);
-            Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.ParallelTaxonomyArrays.Parents()[0]);
+            Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.ParallelTaxonomyArrays.Parents[0]);
             Assert.AreEqual(0, tr.GetOrdinal(new FacetLabel()));
             tr.Dispose(true);
             indexDir.Dispose();
@@ -333,7 +333,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
             Assert.AreEqual(1, tr.Size);
             Assert.AreEqual(0, tr.GetPath(0).Length);
-            Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.ParallelTaxonomyArrays.Parents()[0]);
+            Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.ParallelTaxonomyArrays.Parents[0]);
             Assert.AreEqual(0, tr.GetOrdinal(new FacetLabel()));
             tw.Dispose();
             tr.Dispose(true);
@@ -420,7 +420,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
 
             // check that the parent of the root ordinal is the invalid ordinal:
-            int[] parents = tr.ParallelTaxonomyArrays.Parents();
+            int[] parents = tr.ParallelTaxonomyArrays.Parents;
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, parents[0]);
 
             // check parent of non-root ordinals:
@@ -557,9 +557,9 @@ namespace Lucene.Net.Facet.Taxonomy
             tw.Dispose();
             var tr = new DirectoryTaxonomyReader(indexDir);
             ParallelTaxonomyArrays ca = tr.ParallelTaxonomyArrays;
-            int[] youngestChildArray = ca.Children();
+            int[] youngestChildArray = ca.Children;
             Assert.AreEqual(tr.Size, youngestChildArray.Length);
-            int[] olderSiblingArray = ca.Siblings();
+            int[] olderSiblingArray = ca.Siblings;
             Assert.AreEqual(tr.Size, olderSiblingArray.Length);
             for (int i = 0; i < ExpectedCategories.Length; i++)
             {
@@ -630,13 +630,13 @@ namespace Lucene.Net.Facet.Taxonomy
             tw.Dispose();
             var tr = new DirectoryTaxonomyReader(indexDir);
             ParallelTaxonomyArrays ca = tr.ParallelTaxonomyArrays;
-            int[] children = ca.Children();
+            int[] children = ca.Children;
             Assert.AreEqual(tr.Size, children.Length);
-            int[] olderSiblingArray = ca.Siblings();
+            int[] olderSiblingArray = ca.Siblings;
             Assert.AreEqual(tr.Size, olderSiblingArray.Length);
 
             // test that the "youngest child" of every category is indeed a child:
-            int[] parents = tr.ParallelTaxonomyArrays.Parents();
+            int[] parents = tr.ParallelTaxonomyArrays.Parents;
             for (int i = 0; i < tr.Size; i++)
             {
                 int youngestChild = children[i];
@@ -726,10 +726,10 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
             ParallelTaxonomyArrays ca = tr.ParallelTaxonomyArrays;
             Assert.AreEqual(3, tr.Size);
-            Assert.AreEqual(3, ca.Siblings().Length);
-            Assert.AreEqual(3, ca.Children().Length);
-            Assert.True(Arrays.Equals(new int[] { 1, 2, -1 }, ca.Children()));
-            Assert.True(Arrays.Equals(new int[] { -1, -1, -1 }, ca.Siblings()));
+            Assert.AreEqual(3, ca.Siblings.Length);
+            Assert.AreEqual(3, ca.Children.Length);
+            Assert.True(Arrays.Equals(new int[] { 1, 2, -1 }, ca.Children));
+            Assert.True(Arrays.Equals(new int[] { -1, -1, -1 }, ca.Siblings));
             tw.AddCategory(new FacetLabel("hi", "ho"));
             tw.AddCategory(new FacetLabel("hello"));
             tw.Commit();
@@ -737,8 +737,8 @@ namespace Lucene.Net.Facet.Taxonomy
             ParallelTaxonomyArrays newca = tr.ParallelTaxonomyArrays;
             Assert.AreSame(newca, ca); // we got exactly the same object
             Assert.AreEqual(3, tr.Size);
-            Assert.AreEqual(3, ca.Siblings().Length);
-            Assert.AreEqual(3, ca.Children().Length);
+            Assert.AreEqual(3, ca.Siblings.Length);
+            Assert.AreEqual(3, ca.Children.Length);
             // After the refresh, things change:
             var newtr = TaxonomyReader.OpenIfChanged(tr);
             Assert.NotNull(newtr);
@@ -746,10 +746,10 @@ namespace Lucene.Net.Facet.Taxonomy
             tr = newtr;
             ca = tr.ParallelTaxonomyArrays;
             Assert.AreEqual(5, tr.Size);
-            Assert.AreEqual(5, ca.Siblings().Length);
-            Assert.AreEqual(5, ca.Children().Length);
-            Assert.True(Arrays.Equals(new int[] { 4, 3, -1, -1, -1 }, ca.Children()));
-            Assert.True(Arrays.Equals(new int[] { -1, -1, -1, 2, 1 }, ca.Siblings()));
+            Assert.AreEqual(5, ca.Siblings.Length);
+            Assert.AreEqual(5, ca.Children.Length);
+            Assert.True(Arrays.Equals(new int[] { 4, 3, -1, -1, -1 }, ca.Children));
+            Assert.True(Arrays.Equals(new int[] { -1, -1, -1, 2, 1 }, ca.Siblings));
             tw.Dispose();
             tr.Dispose();
             indexDir.Dispose();
@@ -771,7 +771,7 @@ namespace Lucene.Net.Facet.Taxonomy
             ParallelTaxonomyArrays ca1 = trBase.ParallelTaxonomyArrays;
 
             int abOrd = trBase.GetOrdinal(abPath);
-            int abYoungChildBase1 = ca1.Children()[abOrd];
+            int abYoungChildBase1 = ca1.Children[abOrd];
 
             int numCategories = AtLeast(800);
             for (int i = 0; i < numCategories; i++)
@@ -786,7 +786,7 @@ namespace Lucene.Net.Facet.Taxonomy
             trBase = newTaxoReader;
 
             ParallelTaxonomyArrays ca2 = trBase.ParallelTaxonomyArrays;
-            int abYoungChildBase2 = ca2.Children()[abOrd];
+            int abYoungChildBase2 = ca2.Children[abOrd];
 
             int numRetries = AtLeast(50);
             for (int retry = 0; retry < numRetries; retry++)
@@ -875,7 +875,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 {
                     while (!stop.Get())
                     {
-                        int lastOrd = tr.ParallelTaxonomyArrays.Parents().Length - 1;
+                        int lastOrd = tr.ParallelTaxonomyArrays.Parents.Length - 1;
                         Assert.NotNull(tr.GetPath(lastOrd), "path of last-ord " + lastOrd + " is not found!");
                         AssertChildrenArrays(tr.ParallelTaxonomyArrays, retry, retrieval[0]++);
                         Thread.Sleep(10);// don't starve refresh()'s CPU, which sleeps every 50 bytes for 1 ms
@@ -890,8 +890,8 @@ namespace Lucene.Net.Facet.Taxonomy
 
             private void AssertChildrenArrays(ParallelTaxonomyArrays ca, int retry, int retrieval)
             {
-                int abYoungChild = ca.Children()[abOrd];
-                Assert.True(abYoungChildBase1 == abYoungChild || abYoungChildBase2 == ca.Children()[abOrd], "Retry " + retry + ": retrieval: " + retrieval + ": wrong youngest child for category " + abPath + " (ord=" + abOrd + ") - must be either " + abYoungChildBase1 + " or " + abYoungChildBase2 + " but was: " + abYoungChild);
+                int abYoungChild = ca.Children[abOrd];
+                Assert.True(abYoungChildBase1 == abYoungChild || abYoungChildBase2 == ca.Children[abOrd], "Retry " + retry + ": retrieval: " + retrieval + ": wrong youngest child for category " + abPath + " (ord=" + abOrd + ") - must be either " + abYoungChildBase1 + " or " + abYoungChildBase2 + " but was: " + abYoungChild);
             }
         }
 
@@ -949,7 +949,7 @@ namespace Lucene.Net.Facet.Taxonomy
             int author = 1;
             try
             {
-                Assert.AreEqual(TaxonomyReader.ROOT_ORDINAL, tr.ParallelTaxonomyArrays.Parents()[author]);
+                Assert.AreEqual(TaxonomyReader.ROOT_ORDINAL, tr.ParallelTaxonomyArrays.Parents[author]);
                 // ok
             }
             catch (System.IndexOutOfRangeException)
@@ -969,7 +969,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.NotNull(newTaxoReader);
             tr.Dispose();
             tr = newTaxoReader;
-            int[] parents = tr.ParallelTaxonomyArrays.Parents();
+            int[] parents = tr.ParallelTaxonomyArrays.Parents;
             Assert.AreEqual(author, parents[dawkins]);
             Assert.AreEqual(TaxonomyReader.ROOT_ORDINAL, parents[author]);
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, parents[TaxonomyReader.ROOT_ORDINAL]);


[07/46] lucenenet git commit: Fixed wrong exception thrown from Facet.SortedSet.SortedSetDocValuesFacetCounts as well as the wrong expected exception in its test.

Posted by sy...@apache.org.
Fixed wrong exception thrown from Facet.SortedSet.SortedSetDocValuesFacetCounts as well as the wrong expected exception in its test.
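
A hedged sketch of what the change means for callers: only the constructor call and the exception type are taken from the diff below; the method name, the namespaces, and the idea that the state and collector arguments come from surrounding setup code are assumptions for illustration.

    using System;
    using Lucene.Net.Facet;
    using Lucene.Net.Facet.SortedSet;

    internal static class StaleStateSketch
    {
        public static void CountOrFail(SortedSetDocValuesReaderState state, FacetsCollector c)
        {
            try
            {
                // Throws if the reader behind 'state' no longer matches the
                // reader being searched.
                new SortedSetDocValuesFacetCounts(state, c);
            }
            catch (InvalidOperationException)
            {
                // Expected after this commit; previously a ThreadStateException
                // was thrown, which callers had to catch instead.
            }
        }
    }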


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/4b87de01
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/4b87de01
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/4b87de01

Branch: refs/heads/master
Commit: 4b87de01204d603d2de96e04fba6a62425bf6381
Parents: abf096e
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 19:19:06 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:46 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs  | 4 +---
 .../SortedSet/TestSortedSetDocValuesFacets.cs                    | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b87de01/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 8ccb190..509b473 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -1,8 +1,6 @@
 \ufeffusing System;
 using System.Collections.Generic;
 using System.Linq;
-using System.Threading;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet.SortedSet
 {
@@ -191,7 +189,7 @@ namespace Lucene.Net.Facet.SortedSet
                 // AIOOBE can happen:
                 if (!Equals(ReaderUtil.GetTopLevelContext(hits.context).Reader, origReader))
                 {
-                    throw new ThreadStateException("the SortedSetDocValuesReaderState provided to this class does not match the reader being searched; you must create a new SortedSetDocValuesReaderState every time you open a new IndexReader");
+                    throw new InvalidOperationException("the SortedSetDocValuesReaderState provided to this class does not match the reader being searched; you must create a new SortedSetDocValuesReaderState every time you open a new IndexReader");
                 }
 
                 SortedSetDocValues segValues = reader.GetSortedSetDocValues(field);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b87de01/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
index 7b42c55..75cf9d3 100644
--- a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
+++ b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
@@ -135,7 +135,7 @@ namespace Lucene.Net.Facet.SortedSet
                 new SortedSetDocValuesFacetCounts(state, c);
                 Fail("did not hit expected exception");
             }
-            catch (IllegalStateException)
+            catch (InvalidOperationException)
             {
                 // expected
             }


[43/46] lucenenet git commit: Fixed "identityHashCode" call in Facet.Taxonomy.TaxonomyFacetSumValueSource.

Posted by sy...@apache.org.
Fixed "identityHashCode" call in Facet.Taxonomy.TaxonomyFacetSumValueSource.
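
For context, RuntimeHelpers.GetHashCode is the closest .NET equivalent of Java's System.identityHashCode: it hashes on object identity and ignores any GetHashCode override on the type. A standalone sketch (the demo class is hypothetical, not part of the commit):

    using System;
    using System.Runtime.CompilerServices;

    public class IdentityHashDemo
    {
        // Deliberately constant override, to show the difference.
        public override int GetHashCode() { return 42; }

        public static void Main()
        {
            var demo = new IdentityHashDemo();
            Console.WriteLine(demo.GetHashCode());               // 42 (the override)
            Console.WriteLine(RuntimeHelpers.GetHashCode(demo)); // identity-based hash
        }
    }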


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/d8668889
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/d8668889
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/d8668889

Branch: refs/heads/master
Commit: d86688897061bc280a730fe2235a5cb398298ee6
Parents: 49e2503
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 23:55:42 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:32:15 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d8668889/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index c5e5cb5..a08021c 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -1,6 +1,7 @@
 \ufeffusing System;
 using System.Collections;
 using System.Collections.Generic;
+using System.Runtime.CompilerServices;
 using System.Threading;
 
 namespace Lucene.Net.Facet.Taxonomy
@@ -222,11 +223,9 @@ namespace Lucene.Net.Facet.Taxonomy
 
             public override int GetHashCode()
             {
-                return hcode;
+                return RuntimeHelpers.GetHashCode(this);
             }
 
-            private static readonly int hcode = typeof(DoubleDocValuesAnonymousInnerClassHelper).GetHashCode();
-
             public override string Description
             {
                 get


[15/46] lucenenet git commit: .NETify Facet: Interfaces should begin with an "I"

Posted by sy...@apache.org.
.NETify Facet: Interfaces should begin with an "I"
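
A small sketch of the convention being applied, with members elided for brevity (the real interface carries the members shown in the diffs below): interface types gain the .NET-style "I" prefix, and every implementation and usage site updates to match.

    namespace Lucene.Net.Facet.Taxonomy.WriterCache
    {
        // Renamed from TaxonomyWriterCache in this commit.
        public interface ITaxonomyWriterCache
        {
            // members elided
        }

        // Implementations keep their names and implement the renamed interface.
        public class Cl2oTaxonomyWriterCache : ITaxonomyWriterCache
        {
        }
    }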


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/2e5bae05
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/2e5bae05
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/2e5bae05

Branch: refs/heads/master
Commit: 2e5bae058f8842dd01e01287473ad1c8783e7b6e
Parents: ae225b9
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 01:20:50 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:06 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/FacetsConfig.cs            | 12 +++++-----
 .../DefaultSortedSetDocValuesReaderState.cs     |  2 +-
 .../Directory/DirectoryTaxonomyWriter.cs        | 24 ++++++++++----------
 src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs |  4 ++--
 .../WriterCache/Cl2oTaxonomyWriterCache.cs      |  2 +-
 .../WriterCache/LruTaxonomyWriterCache.cs       |  2 +-
 .../Taxonomy/WriterCache/TaxonomyWriterCache.cs |  6 ++---
 .../Taxonomy/Directory/TestAddTaxonomy.cs       | 16 ++++++-------
 .../Directory/TestConcurrentFacetedIndexing.cs  | 10 ++++----
 .../Directory/TestDirectoryTaxonomyReader.cs    |  2 +-
 .../Directory/TestDirectoryTaxonomyWriter.cs    | 10 ++++----
 .../Taxonomy/TestSearcherTaxonomyManager.cs     |  4 ++--
 .../Taxonomy/TestTaxonomyCombined.cs            | 20 ++++++++--------
 .../Taxonomy/TestTaxonomyFacetAssociations.cs   |  6 ++---
 .../Taxonomy/TestTaxonomyFacetCounts.cs         |  6 ++---
 .../Taxonomy/TestTaxonomyFacetCounts2.cs        |  8 +++----
 .../TestDrillDownQuery.cs                       |  4 ++--
 .../TestMultipleIndexFields.cs                  |  6 ++---
 18 files changed, 72 insertions(+), 72 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index a62d625..99aaf6c 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -40,7 +40,7 @@ namespace Lucene.Net.Facet
     using SortedSetDocValuesFacetField = Lucene.Net.Facet.SortedSet.SortedSetDocValuesFacetField;
     using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField;
     using StringField = Lucene.Net.Documents.StringField;
-    using TaxonomyWriter = Lucene.Net.Facet.Taxonomy.TaxonomyWriter;
+    using ITaxonomyWriter = Lucene.Net.Facet.Taxonomy.ITaxonomyWriter;
 
     /// <summary>
     /// Records per-dimension configuration.  By default a
@@ -270,7 +270,7 @@ namespace Lucene.Net.Facet
         /// input one!
         /// </para>
         /// </summary>
-        public virtual Document Build(TaxonomyWriter taxoWriter, Document doc)
+        public virtual Document Build(ITaxonomyWriter taxoWriter, Document doc)
         {
             // Find all FacetFields, collated by the actual field:
             IDictionary<string, IList<FacetField>> byField = new Dictionary<string, IList<FacetField>>();
@@ -395,7 +395,7 @@ namespace Lucene.Net.Facet
             return result;
         }
 
-        private void ProcessFacetFields(TaxonomyWriter taxoWriter, IDictionary<string, IList<FacetField>> byField, Document doc)
+        private void ProcessFacetFields(ITaxonomyWriter taxoWriter, IDictionary<string, IList<FacetField>> byField, Document doc)
         {
 
             foreach (KeyValuePair<string, IList<FacetField>> ent in byField)
@@ -486,7 +486,7 @@ namespace Lucene.Net.Facet
             }
         }
 
-        private void ProcessAssocFacetFields(TaxonomyWriter taxoWriter, IDictionary<string, IList<AssociationFacetField>> byField, Document doc)
+        private void ProcessAssocFacetFields(ITaxonomyWriter taxoWriter, IDictionary<string, IList<AssociationFacetField>> byField, Document doc)
         {
             foreach (KeyValuePair<string, IList<AssociationFacetField>> ent in byField)
             {
@@ -592,11 +592,11 @@ namespace Lucene.Net.Facet
             return new BytesRef(bytes, 0, upto);
         }
 
-        private void checkTaxoWriter(TaxonomyWriter taxoWriter)
+        private void checkTaxoWriter(ITaxonomyWriter taxoWriter)
         {
             if (taxoWriter == null)
             {
-                throw new ThreadStateException("a non-null TaxonomyWriter must be provided when indexing FacetField or AssociationFacetField");
+                throw new ThreadStateException("a non-null ITaxonomyWriter must be provided when indexing FacetField or AssociationFacetField");
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index 649135c..10983d8 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -61,7 +61,7 @@ namespace Lucene.Net.Facet.SortedSet
             valueCount = (int)dv.ValueCount;
 
             // TODO: we can make this more efficient if eg we can be
-            // "involved" when OrdinalMap is being created?  Ie see
+            // "involved" when IOrdinalMap is being created?  Ie see
             // each term/ord it's assigning as it goes...
             string lastDim = null;
             int startOrd = -1;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index c93d0e7..a1c1939 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     using ReaderManager = Lucene.Net.Index.ReaderManager;
     using SegmentInfos = Lucene.Net.Index.SegmentInfos;
     using StringField = Lucene.Net.Documents.StringField;
-    using TaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.TaxonomyWriterCache;
+    using ITaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.ITaxonomyWriterCache;
     using Terms = Lucene.Net.Index.Terms;
     using TermsEnum = Lucene.Net.Index.TermsEnum;
     using TextField = Lucene.Net.Documents.TextField;
@@ -72,7 +72,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     /// @lucene.experimental
     /// </para>
     /// </summary>
-    public class DirectoryTaxonomyWriter : TaxonomyWriter
+    public class DirectoryTaxonomyWriter : ITaxonomyWriter
     {
         /// <summary>
         /// Property name of user commit data that contains the index epoch. The epoch
@@ -87,7 +87,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         private readonly Directory dir;
         private readonly IndexWriter indexWriter;
-        private readonly TaxonomyWriterCache cache;
+        private readonly ITaxonomyWriterCache cache;
         private readonly AtomicInteger cacheMisses = new AtomicInteger(0);
 
         // Records the taxonomy index epoch, updated on replaceTaxonomy as well.
@@ -175,7 +175,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <exception cref="IOException">
         ///     if another error occurred. </exception>
         public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, 
-            TaxonomyWriterCache cache)
+            ITaxonomyWriterCache cache)
         {
             dir = directory;
             IndexWriterConfig config = CreateIndexWriterConfig(openMode);
@@ -332,7 +332,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// with the parameters (1024, 0.15f, 3), i.e., the entire taxonomy is
         /// cached in memory while building it.
         /// </summary>
-        public static TaxonomyWriterCache DefaultTaxonomyWriterCache()
+        public static ITaxonomyWriterCache DefaultTaxonomyWriterCache()
         {
             return new Cl2oTaxonomyWriterCache(1024, 0.15f, 3);
         }
@@ -897,14 +897,14 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <seealso cref="OrdinalMap"/> with a mapping from the original ordinal to the new
         /// ordinal.
         /// </summary>
-        public virtual void AddTaxonomy(Directory taxoDir, OrdinalMap map)
+        public virtual void AddTaxonomy(Directory taxoDir, IOrdinalMap map)
         {
             EnsureOpen();
             DirectoryReader r = DirectoryReader.Open(taxoDir);
             try
             {
                 int size = r.NumDocs;
-                OrdinalMap ordinalMap = map;
+                IOrdinalMap ordinalMap = map;
                 ordinalMap.Size = size;
                 int @base = 0;
                 TermsEnum te = null;
@@ -936,11 +936,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// wit separate taxonomies.
         /// <para> 
         /// addToTaxonomies() merges one or more taxonomies into the given taxonomy
-        /// (this). An OrdinalMap is filled for each of the added taxonomies,
+        /// (this). An IOrdinalMap is filled for each of the added taxonomies,
         /// containing the new ordinal (in the merged taxonomy) of each of the
         /// categories in the old taxonomy.
         /// <P>  
-        /// There exist two implementations of OrdinalMap: MemoryOrdinalMap and
+        /// There exist two implementations of IOrdinalMap: MemoryOrdinalMap and
         /// DiskOrdinalMap. As their names suggest, the former keeps the map in
         /// memory and the latter in a temporary disk file. Because these maps will
         /// later be needed one by one (to remap the counting lists), not all at the
@@ -949,7 +949,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// by one, when needed).
         /// </para>
         /// </summary>
-        public interface OrdinalMap
+        public interface IOrdinalMap
         {
             /// <summary>
             /// Set the size of the map. This MUST be called before addMapping().
@@ -983,7 +983,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// <seealso cref="OrdinalMap"/> maintained in memory
         /// </summary>
-        public sealed class MemoryOrdinalMap : OrdinalMap
+        public sealed class MemoryOrdinalMap : IOrdinalMap
         {
             internal int[] map;
 
@@ -1027,7 +1027,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// <seealso cref="OrdinalMap"/> maintained on file system
         /// </summary>
-        public sealed class DiskOrdinalMap : OrdinalMap
+        public sealed class DiskOrdinalMap : IOrdinalMap
         {
             internal string tmpfile;
             internal OutputStreamDataOutput @out;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index 0487cbf..6358dc1 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -23,7 +23,7 @@ namespace Lucene.Net.Facet.Taxonomy
     using TwoPhaseCommit = Lucene.Net.Index.TwoPhaseCommit;
 
     /// <summary>
-    /// TaxonomyWriter is the interface which the faceted-search library uses
+    /// ITaxonomyWriter is the interface which the faceted-search library uses
     /// to dynamically build the taxonomy at indexing time.
     /// <P>
     /// Notes about concurrent access to the taxonomy:
@@ -51,7 +51,7 @@ namespace Lucene.Net.Facet.Taxonomy
     /// 
     /// @lucene.experimental
     /// </summary>
-    public interface TaxonomyWriter : IDisposable, TwoPhaseCommit
+    public interface ITaxonomyWriter : IDisposable, TwoPhaseCommit
     {
         /// <summary>
         /// addCategory() adds a category with a given path name to the taxonomy,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index 8e6ca98..9a40f16 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// 
     /// @lucene.experimental
     /// </summary>
-    public class Cl2oTaxonomyWriterCache : TaxonomyWriterCache
+    public class Cl2oTaxonomyWriterCache : ITaxonomyWriterCache
     {
         private const int LockTimeOut = 1000;
         private readonly ReaderWriterLock @lock = new ReaderWriterLock();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index d74891b..8727893 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -22,7 +22,7 @@
     /// 
     /// @lucene.experimental
     /// </summary>
-    public class LruTaxonomyWriterCache : TaxonomyWriterCache
+    public class LruTaxonomyWriterCache : ITaxonomyWriterCache
     {
         /// <summary>
         /// Determines cache type.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index b0d32b9..bbe82ec 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -20,8 +20,8 @@
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
 
     /// <summary>
-    /// TaxonomyWriterCache is a relatively simple interface for a cache of
-    /// category->ordinal mappings, used in TaxonomyWriter implementations (such as
+    /// ITaxonomyWriterCache is a relatively simple interface for a cache of
+    /// category->ordinal mappings, used in ITaxonomyWriter implementations (such as
     /// <seealso cref="DirectoryTaxonomyWriter"/>).
     /// <para>
     /// It basically has put() methods for adding a mapping, and get() for looking a
@@ -46,7 +46,7 @@
     /// @lucene.experimental
     /// </para>
     /// </summary>
-    public interface TaxonomyWriterCache
+    public interface ITaxonomyWriterCache
     {
         /// <summary>
         /// Let go of whatever resources the cache is holding. After a close(),

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
index df4f491..9482407 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
@@ -13,7 +13,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
     using DiskOrdinalMap = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.DiskOrdinalMap;
     using MemoryOrdinalMap = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
-    using OrdinalMap = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.OrdinalMap;
+    using IOrdinalMap = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.IOrdinalMap;
     using Directory = Lucene.Net.Store.Directory;
     using IOUtils = Lucene.Net.Util.IOUtils;
     using TestUtil = Lucene.Net.Util.TestUtil;
@@ -64,7 +64,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
 
             var tw1 = new DirectoryTaxonomyWriter(dirs[0]);
-            OrdinalMap map = randomOrdinalMap();
+            IOrdinalMap map = randomOrdinalMap();
             tw1.AddTaxonomy(dirs[1], map);
             tw1.Dispose();
 
@@ -108,7 +108,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
 
-        private OrdinalMap randomOrdinalMap()
+        private IOrdinalMap randomOrdinalMap()
         {
             if (Random().NextBoolean())
             {
@@ -120,7 +120,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
         }
 
-        private void validate(Directory dest, Directory src, OrdinalMap ordMap)
+        private void validate(Directory dest, Directory src, IOrdinalMap ordMap)
         {
             var destTr = new DirectoryTaxonomyReader(dest);
             try
@@ -168,7 +168,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             Directory src = NewDirectory();
             (new DirectoryTaxonomyWriter(src)).Dispose(); // create an empty taxonomy
 
-            OrdinalMap map = randomOrdinalMap();
+            IOrdinalMap map = randomOrdinalMap();
             destTW.AddTaxonomy(src, map);
             destTW.Dispose();
 
@@ -189,7 +189,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             srcTW.Dispose();
 
             DirectoryTaxonomyWriter destTW = new DirectoryTaxonomyWriter(dest);
-            OrdinalMap map = randomOrdinalMap();
+            IOrdinalMap map = randomOrdinalMap();
             destTW.AddTaxonomy(src, map);
             destTW.Dispose();
 
@@ -234,7 +234,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             tw2.AddCategory(new FacetLabel("Aardvarks", "Bob"));
             tw2.Dispose();
 
-            OrdinalMap map = randomOrdinalMap();
+            IOrdinalMap map = randomOrdinalMap();
 
             tw1.AddTaxonomy(src, map);
             tw1.Dispose();
@@ -266,7 +266,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             ThreadClass t = new ThreadAnonymousInnerClassHelper2(this, numCategories, destTw);
             t.Start();
 
-            OrdinalMap map = new MemoryOrdinalMap();
+            IOrdinalMap map = new MemoryOrdinalMap();
             destTw.AddTaxonomy(src, map);
             t.Join();
             destTw.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index 5ee17a6..f2c0fe7 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -10,7 +10,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
 
     using Document = Lucene.Net.Documents.Document;
-    using TaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.TaxonomyWriterCache;
+    using ITaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.ITaxonomyWriterCache;
     using Cl2oTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.Cl2oTaxonomyWriterCache;
     using LruTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.LruTaxonomyWriterCache;
     using IndexWriter = Lucene.Net.Index.IndexWriter;
@@ -38,11 +38,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     public class TestConcurrentFacetedIndexing : FacetTestCase
     {
 
-        // A No-Op TaxonomyWriterCache which always discards all given categories, and
+        // A No-Op ITaxonomyWriterCache which always discards all given categories, and
         // always returns true in put(), to indicate some cache entries were cleared.
-        private static TaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousInnerClassHelper();
+        private static ITaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousInnerClassHelper();
 
-        private class TaxonomyWriterCacheAnonymousInnerClassHelper : TaxonomyWriterCache
+        private class TaxonomyWriterCacheAnonymousInnerClassHelper : ITaxonomyWriterCache
         {
             public TaxonomyWriterCacheAnonymousInnerClassHelper()
             {
@@ -82,7 +82,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             return new FacetField(l1, l2, l3);
         }
 
-        internal static TaxonomyWriterCache NewTaxoWriterCache(int ndocs)
+        internal static ITaxonomyWriterCache NewTaxoWriterCache(int ndocs)
         {
             double d = Random().NextDouble();
             if (d < 0.7)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
index 74ba8fe..d0801dd 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
@@ -146,7 +146,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         private void doTestReadRecreatedTaxonomy(Random random, bool closeReader)
         {
             Directory dir = null;
-            TaxonomyWriter tw = null;
+            ITaxonomyWriter tw = null;
             TaxonomyReader tr = null;
 
             // prepare a few categories

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index 60e90c2..8924f56 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -13,7 +13,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
     using Document = Lucene.Net.Documents.Document;
     using MemoryOrdinalMap = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
-    using TaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.TaxonomyWriterCache;
+    using ITaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.ITaxonomyWriterCache;
     using Cl2oTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.Cl2oTaxonomyWriterCache;
     using LruTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.LruTaxonomyWriterCache;
     using DirectoryReader = Lucene.Net.Index.DirectoryReader;
@@ -48,11 +48,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     public class TestDirectoryTaxonomyWriter : FacetTestCase
     {
 
-        // A No-Op TaxonomyWriterCache which always discards all given categories, and
+        // A No-Op ITaxonomyWriterCache which always discards all given categories, and
         // always returns true in put(), to indicate some cache entries were cleared.
-        private static TaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousInnerClassHelper();
+        private static ITaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousInnerClassHelper();
 
-        private class TaxonomyWriterCacheAnonymousInnerClassHelper : TaxonomyWriterCache
+        private class TaxonomyWriterCacheAnonymousInnerClassHelper : ITaxonomyWriterCache
         {
             public virtual void Close()
             {
@@ -272,7 +272,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             Directory dir = NewDirectory();
             var values = new ConcurrentDictionary<string, string>();
             double d = Random().NextDouble();
-            TaxonomyWriterCache cache;
+            ITaxonomyWriterCache cache;
             if (d < 0.7)
             {
                 // this is the fastest, yet most memory consuming

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
index b0a1277..0d02696 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
@@ -47,12 +47,12 @@ namespace Lucene.Net.Facet.Taxonomy
 
             internal IndexWriter w;
             internal FacetsConfig config;
-            internal TaxonomyWriter tw;
+            internal ITaxonomyWriter tw;
             internal ReferenceManager<SearcherAndTaxonomy> mgr;
             internal int ordLimit;
             internal AtomicBoolean stop;
 
-            public IndexerThread(IndexWriter w, FacetsConfig config, TaxonomyWriter tw, ReferenceManager<SearcherAndTaxonomy> mgr, int ordLimit, AtomicBoolean stop)
+            public IndexerThread(IndexWriter w, FacetsConfig config, ITaxonomyWriter tw, ReferenceManager<SearcherAndTaxonomy> mgr, int ordLimit, AtomicBoolean stop)
             {
                 this.w = w;
                 this.config = config;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index caf1f09..9290ec2 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -61,7 +61,7 @@ namespace Lucene.Net.Facet.Taxonomy
 	  };
 
         /// <summary>
-        ///  When adding the above categories with TaxonomyWriter.AddCategory(), 
+        ///  When adding the above categories with ITaxonomyWriter.AddCategory(), 
         ///  the following paths are expected to be returned:
         ///  (note that currently the full path is not returned, and therefore
         ///  not tested - rather, just the last component, the ordinal, is returned
@@ -99,7 +99,7 @@ namespace Lucene.Net.Facet.Taxonomy
         ///  index will surely have this method fail.
         /// </summary>
 
-        public static void FillTaxonomy(TaxonomyWriter tw)
+        public static void FillTaxonomy(ITaxonomyWriter tw)
         {
             for (int i = 0; i < categories.Length; i++)
             {
@@ -149,7 +149,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        ///  Basic tests for TaxonomyWriter. Basically, we test that
+        ///  Basic tests for ITaxonomyWriter. Basically, we test that
         ///  IndexWriter.AddCategory works, i.e. returns the expected ordinals
         ///  (this is tested by calling the fillTaxonomy() method above).
         ///  We do not test here that after writing the index can be read -
@@ -161,7 +161,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var indexDir = NewDirectory();
             var tw = new DirectoryTaxonomyWriter(indexDir);
             FillTaxonomy(tw);
-            // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
+            // Also check ITaxonomyWriter.getSize() - see that the taxonomy's size
             // is what we expect it to be.
             Assert.AreEqual(ExpectedCategories.Length, tw.Size);
             tw.Dispose();
@@ -445,7 +445,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Tests for TaxonomyWriter's getParent() method. We check it by comparing
+        /// Tests for ITaxonomyWriter's getParent() method. We check it by comparing
         /// its results to those we could have gotten by looking at the category
         /// string paths using a TaxonomyReader (where the parentage is obvious).
         /// Note that after testReaderBasic(), we already know we can trust the
@@ -490,7 +490,7 @@ namespace Lucene.Net.Facet.Taxonomy
             indexDir.Dispose();
         }
 
-        private void CheckWriterParent(TaxonomyReader tr, TaxonomyWriter tw)
+        private void CheckWriterParent(TaxonomyReader tr, ITaxonomyWriter tw)
         {
             // check that the parent of the root ordinal is the invalid ordinal:
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tw.GetParent(0));
@@ -1068,7 +1068,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// taxonomy index. Calling it after something else was already added to the
         /// taxonomy index will surely have this method fail.
         /// </summary>
-        public static void FillTaxonomyCheckPaths(TaxonomyWriter tw)
+        public static void FillTaxonomyCheckPaths(ITaxonomyWriter tw)
         {
             for (int i = 0; i < categories.Length; i++)
             {
@@ -1092,7 +1092,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         // After fillTaxonomy returned successfully, checkPaths() checks that
         // the getParent() calls return as expected, from the table
-        public static void CheckPaths(TaxonomyWriter tw)
+        public static void CheckPaths(ITaxonomyWriter tw)
         {
             for (int i = 0; i < categories.Length; i++)
             {
@@ -1112,7 +1112,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Basic test for TaxonomyWriter.getParent(). This is similar to testWriter
+        /// Basic test for ITaxonomyWriter.getParent(). This is similar to testWriter
         /// above, except we also check the parents of the added categories, not just
         /// the categories themselves.
         /// </summary>
@@ -1122,7 +1122,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var indexDir = NewDirectory();
             var tw = new DirectoryTaxonomyWriter(indexDir);
             FillTaxonomyCheckPaths(tw);
-            // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
+            // Also check ITaxonomyWriter.getSize() - see that the taxonomy's size
             // is what we expect it to be.
             Assert.AreEqual(ExpectedCategories.Length, tw.Size);
             tw.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
index b4bc17b..8cdd14d 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
@@ -191,7 +191,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
 
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
             FacetsConfig config = new FacetsConfig();
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
 
@@ -216,7 +216,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
 
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
             FacetsConfig config = new FacetsConfig();
             config.SetHierarchical("a", true);
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
@@ -241,7 +241,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
 
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
             FacetsConfig config = new FacetsConfig();
             config.SetRequireDimCount("a", true);
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index 51d0794..263a1d2 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -287,7 +287,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Store.Directory taxoDir = NewDirectory();
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
             iwc.SetSimilarity(new PerFieldSimilarityWrapperAnonymousInnerClassHelper(this));
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode_e.CREATE);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode_e.CREATE);
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
             FacetsConfig config = new FacetsConfig();
 
@@ -543,7 +543,7 @@ namespace Lucene.Net.Facet.Taxonomy
         {
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode_e.CREATE);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode_e.CREATE);
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
             FacetsConfig config = new FacetsConfig();
 
@@ -686,7 +686,7 @@ namespace Lucene.Net.Facet.Taxonomy
             IOUtils.Close(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
         }
 
-        private void indexTwoDocs(TaxonomyWriter taxoWriter, IndexWriter indexWriter, FacetsConfig config, bool withContent)
+        private void indexTwoDocs(ITaxonomyWriter taxoWriter, IndexWriter indexWriter, FacetsConfig config, bool withContent)
         {
             for (int i = 0; i < 2; i++)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
index 6cc8314..6f4cb8b 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
@@ -170,7 +170,7 @@ namespace Lucene.Net.Facet.Taxonomy
             indexWriter.Commit(); // flush a segment
         }
 
-        private static void IndexDocsWithFacetsNoTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
+        private static void IndexDocsWithFacetsNoTerms(IndexWriter indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
         {
             Random random = Random();
             int numDocs = AtLeast(random, 2);
@@ -184,7 +184,7 @@ namespace Lucene.Net.Facet.Taxonomy
             indexWriter.Commit(); // flush a segment
         }
 
-        private static void IndexDocsWithFacetsAndTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
+        private static void IndexDocsWithFacetsAndTerms(IndexWriter indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
         {
             Random random = Random();
             int numDocs = AtLeast(random, 2);
@@ -199,7 +199,7 @@ namespace Lucene.Net.Facet.Taxonomy
             indexWriter.Commit(); // flush a segment
         }
 
-        private static void IndexDocsWithFacetsAndSomeTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
+        private static void IndexDocsWithFacetsAndSomeTerms(IndexWriter indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
         {
             Random random = Random();
             int numDocs = AtLeast(random, 2);
@@ -260,7 +260,7 @@ namespace Lucene.Net.Facet.Taxonomy
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
             //conf.MergePolicy = NoMergePolicy.INSTANCE; // prevent merges, so we can control the index segments
             IndexWriter indexWriter = new IndexWriter(indexDir, conf);
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
 
             allExpectedCounts = newCounts();
             termExpectedCounts = newCounts();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs b/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
index 3ceb005..01f7fe5 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Facet
     using Document = Lucene.Net.Documents.Document;
     using Field = Lucene.Net.Documents.Field;
     using TextField = Lucene.Net.Documents.TextField;
-    using TaxonomyWriter = Lucene.Net.Facet.Taxonomy.TaxonomyWriter;
+    using ITaxonomyWriter = Lucene.Net.Facet.Taxonomy.ITaxonomyWriter;
     using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
     using IndexReader = Lucene.Net.Index.IndexReader;
@@ -75,7 +75,7 @@ namespace Lucene.Net.Facet
             RandomIndexWriter writer = new RandomIndexWriter(r, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(r, MockTokenizer.KEYWORD, false)));
 
             taxoDir = NewDirectory();
-            TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
+            ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
             config = new FacetsConfig();
 
             // Randomize the per-dim config:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2e5bae05/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs b/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
index 086e608..3d229fe 100644
--- a/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
+++ b/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
@@ -28,7 +28,7 @@ namespace Lucene.Net.Facet
     using Field = Lucene.Net.Documents.Field;
     using TextField = Lucene.Net.Documents.TextField;
     using TaxonomyReader = Lucene.Net.Facet.Taxonomy.TaxonomyReader;
-    using TaxonomyWriter = Lucene.Net.Facet.Taxonomy.TaxonomyWriter;
+    using ITaxonomyWriter = Lucene.Net.Facet.Taxonomy.ITaxonomyWriter;
     using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
     using AtomicReader = Lucene.Net.Index.AtomicReader;
@@ -234,7 +234,7 @@ namespace Lucene.Net.Facet
             // create and open an index writer
             RandomIndexWriter iw = new RandomIndexWriter(Random(), indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));
             // create and open a taxonomy writer
-            TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
+            ITaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
             FacetsConfig config = Config;
             config.SetIndexFieldName("Band", "$music");
@@ -286,7 +286,7 @@ namespace Lucene.Net.Facet
             return fc;
         }
 
-        private static void seedIndex(TaxonomyWriter tw, RandomIndexWriter iw, FacetsConfig config)
+        private static void seedIndex(ITaxonomyWriter tw, RandomIndexWriter iw, FacetsConfig config)
         {
             foreach (FacetField ff in CATEGORIES)
             {


[25/46] lucenenet git commit: Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.IOrdinalMap: Renamed Size back to the original SetSize() because it changes the internal state of the object.

Posted by sy...@apache.org.
Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.IOrdinalMap: Renamed Size back to the original SetSize() because it changes the internal state of the object.
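
The rename follows the .NET convention that an operation which mutates internal state reads better as a method than as a property setter. The sketch below is illustrative only; it assumes a simplified IOrdinalMap shape and is not the actual Lucene.Net source (the real interface has additional members such as AddDone and GetMap).

// Minimal sketch, assuming a simplified IOrdinalMap; for illustration only.
public interface IOrdinalMap
{
    // Exposed as a method rather than a property setter because calling it
    // allocates/replaces the internal mapping array (a visible side effect).
    void SetSize(int taxonomySize);

    // Records a mapping from an ordinal in the source taxonomy to an
    // ordinal in the merged taxonomy.
    void AddMapping(int origOrdinal, int newOrdinal);
}

// Simplified in-memory implementation (an assumption, not the ported class).
internal class MemoryOrdinalMapSketch : IOrdinalMap
{
    private int[] map = new int[0];

    public void SetSize(int taxonomySize)
    {
        // Mutates internal state, which is why this is a method, not a setter.
        map = new int[taxonomySize];
    }

    public void AddMapping(int origOrdinal, int newOrdinal)
    {
        if (origOrdinal >= map.Length)
        {
            System.Array.Resize(ref map, origOrdinal + 1);
        }
        map[origOrdinal] = newOrdinal;
    }
}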


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/0bc31302
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/0bc31302
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/0bc31302

Branch: refs/heads/master
Commit: 0bc313025481fbd698f0dbb773a8fa27ade18d0f
Parents: f50f913
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:18:47 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:31 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/Directory/DirectoryTaxonomyWriter.cs  | 17 ++++++-----------
 1 file changed, 6 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0bc31302/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index e4f2d3b..d25cd1b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -905,7 +905,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             {
                 int size = r.NumDocs;
                 IOrdinalMap ordinalMap = map;
-                ordinalMap.Size = size;
+                ordinalMap.SetSize(size);
                 int @base = 0;
                 TermsEnum te = null;
                 DocsEnum docs = null;
@@ -957,7 +957,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             /// called exactly 'size' times, with different origOrdinals between 0
             /// and size-1.  
             /// </summary>
-            int Size { set; }
+            void SetSize(int taxonomySize);
 
             /// <summary>
             /// Record a mapping. </summary>
@@ -995,9 +995,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 map = new int[] { };
             }
 
-            public int Size
+            public void SetSize(int taxonomySize)
             {
-                set { map = new int[value]; }
+                map = new int[taxonomySize];
             }
 
             public void AddMapping(int origOrdinal, int newOrdinal)
@@ -1011,8 +1011,6 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                     Array.Resize(ref map, origOrdinal + 1);
                     map[origOrdinal] = newOrdinal;
                 }
-
-
             }
 
             public void AddDone() // nothing to do
@@ -1051,12 +1049,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 @out.WriteInt(newOrdinal);
             }
 
-            public int Size
+            public void SetSize(int taxonomySize)
             {
-                set
-                {
-                    @out.WriteInt(value);
-                }
+                @out.WriteInt(taxonomySize);
             }
 
             public void AddDone()


[39/46] lucenenet git commit: Facet: Updated documentation

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index db56dc2..4cf132d 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -28,32 +28,37 @@ namespace Lucene.Net.Facet.Taxonomy
     /// <summary>
     /// TaxonomyReader is the read-only interface with which the faceted-search
     /// library uses the taxonomy during search time.
-    /// <P>
+    /// <para>
     /// A TaxonomyReader holds a list of categories. Each category has a serial
     /// number which we call an "ordinal", and a hierarchical "path" name:
-    /// <UL>
-    /// <LI>
+    /// <list type="bullet">
+    /// <item>
     /// The ordinal is an integer that starts at 0 for the first category (which is
     /// always the root category), and grows contiguously as more categories are
     /// added; Note that once a category is added, it can never be deleted.
-    /// <LI>
+    /// </item>
+    /// <item>
     /// The path is a CategoryPath object specifying the category's position in the
     /// hierarchy.
-    /// </UL>
-    /// <B>Notes about concurrent access to the taxonomy:</B>
-    /// <P>
+    /// </item>
+    /// </list>
+    /// </para>
+    /// <b>Notes about concurrent access to the taxonomy:</b>
+    /// <para>
     /// An implementation must allow multiple readers to be active concurrently
     /// with a single writer. Readers follow so-called "point in time" semantics,
     /// i.e., a TaxonomyReader object will only see taxonomy entries which were
     /// available at the time it was created. What the writer writes is only
-    /// available to (new) readers after the writer's commit() is called.
-    /// <P>
+    /// available to (new) readers after the writer's <see cref="Index.IndexWriter.Commit()"/> is called.
+    /// </para>
+    /// <para>
     /// In faceted search, two separate indices are used: the main Lucene index,
     /// and the taxonomy. Because the main index refers to the categories listed
     /// in the taxonomy, it is important to open the taxonomy *after* opening the
-    /// main index, and it is also necessary to reopen() the taxonomy after
-    /// reopen()ing the main index.
-    /// <P>
+    /// main index, and it is also necessary to Reopen() the taxonomy after
+    /// Reopen()ing the main index.
+    /// </para>
+    /// <para>
     /// This order is important, otherwise it would be possible for the main index
     /// to refer to a category which is not yet visible in the old snapshot of
     /// the taxonomy. Note that it is indeed fine for the the taxonomy to be opened
@@ -61,13 +66,15 @@ namespace Lucene.Net.Facet.Taxonomy
     /// a category is added to the taxonomy, it can never be changed or deleted,
     /// so there is no danger that a "too new" taxonomy not being consistent with
     /// an older index.
+    /// </para>
     /// 
     /// @lucene.experimental
     /// </summary>
     public abstract class TaxonomyReader
     {
         /// <summary>
-        /// An iterator over a category's children. </summary>
+        /// An iterator over a category's children.
+        /// </summary>
         public class ChildrenIterator
         {
             private readonly int[] siblings;
@@ -80,7 +87,7 @@ namespace Lucene.Net.Facet.Taxonomy
             }
 
             /// <summary>
-            /// Return the next child ordinal, or <seealso cref="TaxonomyReader#INVALID_ORDINAL"/>
+            /// Return the next child ordinal, or <see cref="TaxonomyReader.INVALID_ORDINAL"/>
             /// if no more children.
             /// </summary>
             public virtual int Next()
@@ -95,35 +102,36 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public TaxonomyReader()
         {
         }
 
         /// <summary>
         /// The root category (the category with the empty path) always has the ordinal
-        /// 0, to which we give a name ROOT_ORDINAL. <seealso cref="#getOrdinal(FacetLabel)"/>
-        /// of an empty path will always return {@code ROOT_ORDINAL}, and
-        /// <seealso cref="#getPath(int)"/> with {@code ROOT_ORDINAL} will return the empty path.
+        /// 0, to which we give a name ROOT_ORDINAL. <see cref="GetOrdinal(FacetLabel)"/>
+        /// of an empty path will always return <see cref="ROOT_ORDINAL"/>, and
+        /// <see cref="GetPath(int)"/> with <see cref="ROOT_ORDINAL"/> will return the empty path.
         /// </summary>
         public const int ROOT_ORDINAL = 0;
 
         /// <summary>
         /// Ordinals are always non-negative, so a negative ordinal can be used to
-        /// signify an error. Methods here return INVALID_ORDINAL (-1) in this case.
+        /// signify an error. Methods here return <see cref="INVALID_ORDINAL"/> (-1) in this case.
         /// </summary>
         public const int INVALID_ORDINAL = -1;
 
         /// <summary>
         /// If the taxonomy has changed since the provided reader was opened, open and
-        /// return a new <seealso cref="TaxonomyReader"/>; else, return {@code null}. The new
-        /// reader, if not {@code null}, will be the same type of reader as the one
+        /// return a new <see cref="TaxonomyReader"/>; else, return <c>null</c>. The new
+        /// reader, if not <c>null</c>, will be the same type of reader as the one
         /// given to this method.
         /// 
         /// <para>
         /// This method is typically far less costly than opening a fully new
-        /// <seealso cref="TaxonomyReader"/> as it shares resources with the provided
-        /// <seealso cref="TaxonomyReader"/>, when possible.
+        /// <see cref="TaxonomyReader"/> as it shares resources with the provided
+        /// <see cref="TaxonomyReader"/>, when possible.
         /// </para>
         /// </summary>
         public static T OpenIfChanged<T>(T oldTaxoReader) where T : TaxonomyReader
@@ -145,14 +153,14 @@ namespace Lucene.Net.Facet.Taxonomy
         protected internal abstract void DoClose();
 
         /// <summary>
-        /// Implements the actual opening of a new <seealso cref="TaxonomyReader"/> instance if
+        /// Implements the actual opening of a new <see cref="TaxonomyReader"/> instance if
         /// the taxonomy has changed.
         /// </summary>
-        /// <seealso cref= #openIfChanged(TaxonomyReader) </seealso>
+        /// <see cref= #openIfChanged(TaxonomyReader) </seealso>
         protected abstract TaxonomyReader DoOpenIfChanged();
 
         /// <summary>
-        /// Throws <seealso cref="AlreadyClosedException"/> if this IndexReader is closed
+        /// Throws <see cref="AlreadyClosedException"/> if this <see cref="IndexReader"/> is disposed
         /// </summary>
         protected void EnsureOpen()
         {
@@ -211,13 +219,14 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Returns a <seealso cref="ParallelTaxonomyArrays"/> object which can be used to
+        /// Returns a <see cref="ParallelTaxonomyArrays"/> object which can be used to
         /// efficiently traverse the taxonomy tree.
         /// </summary>
         public abstract ParallelTaxonomyArrays ParallelTaxonomyArrays { get; }
 
         /// <summary>
-        /// Returns an iterator over the children of the given ordinal. </summary>
+        /// Returns an iterator over the children of the given ordinal.
+        /// </summary>
         public virtual ChildrenIterator GetChildren(int ordinal)
         {
             ParallelTaxonomyArrays arrays = ParallelTaxonomyArrays;
@@ -228,7 +237,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <summary>
         /// Retrieve user committed data.
         /// </summary>
-        /// <seealso cref= TaxonomyWriter#setCommitData(Map) </seealso>
+        /// <seealso cref="ITaxonomyWriter.CommitData"/>
         public abstract IDictionary<string, string> CommitUserData { get; }
 
         /// <summary>
@@ -237,12 +246,13 @@ namespace Lucene.Net.Facet.Taxonomy
         /// categories are added (note that once a category is added, it can never be
         /// deleted).
         /// </summary>
-        /// <returns> the category's ordinal or <seealso cref="#INVALID_ORDINAL"/> if the category
-        ///         wasn't foun. </returns>
+        /// <returns> the category's ordinal or <see cref="INVALID_ORDINAL"/> if the category
+        ///         wasn't found. </returns>
         public abstract int GetOrdinal(FacetLabel categoryPath);
 
         /// <summary>
-        /// Returns ordinal for the dim + path. </summary>
+        /// Returns ordinal for the dim + path.
+        /// </summary>
         public virtual int GetOrdinal(string dim, string[] path)
         {
             string[] fullPath = new string[path.Length + 1];
@@ -252,11 +262,13 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Returns the path name of the category with the given ordinal. </summary>
+        /// Returns the path name of the category with the given ordinal.
+        /// </summary>
         public abstract FacetLabel GetPath(int ordinal);
 
         /// <summary>
-        /// Returns the current refCount for this taxonomy reader. </summary>
+        /// Returns the current refCount for this taxonomy reader.
+        /// </summary>
         public int RefCount
         {
             get
@@ -278,8 +290,8 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Expert: increments the refCount of this TaxonomyReader instance. RefCounts
         /// can be used to determine when a taxonomy reader can be closed safely, i.e.
         /// as soon as there are no more references. Be sure to always call a
-        /// corresponding decRef(), in a finally clause; otherwise the reader may never
-        /// be closed.
+        /// corresponding <see cref="DecRef"/>, in a finally clause; otherwise the reader may never
+        /// be disposed.
         /// </summary>
         public void IncRef()
         {
@@ -289,8 +301,8 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Expert: increments the refCount of this TaxonomyReader
-        ///  instance only if it has not been closed yet.  Returns
-        ///  true on success. 
+        /// instance only if it has not been closed yet.  Returns
+        /// <c>true</c> on success. 
         /// </summary>
         public bool TryIncRef()
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index fdec22f..94f62a1 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -23,17 +23,19 @@ namespace Lucene.Net.Facet.Taxonomy
     using TwoPhaseCommit = Lucene.Net.Index.TwoPhaseCommit;
 
     /// <summary>
-    /// ITaxonomyWriter is the interface which the faceted-search library uses
+    /// <see cref="ITaxonomyWriter"/> is the interface which the faceted-search library uses
     /// to dynamically build the taxonomy at indexing time.
-    /// <P>
+    /// <para>
     /// Notes about concurrent access to the taxonomy:
-    /// <P>
+    /// </para>
+    /// <para>
     /// An implementation must allow multiple readers and a single writer to be
     /// active concurrently. Readers follow so-called "point in time" semantics,
     /// i.e., a reader object will only see taxonomy entries which were available
     /// at the time it was created. What the writer writes is only available to
-    /// (new) readers after the writer's commit() is called.
-    /// <P>
+    /// (new) readers after the writer's <see cref="Index.IndexWriter.Commit"/> is called.
+    /// </para>
+    /// <para>
     /// Faceted search keeps two indices - namely Lucene's main index, and this
     /// taxonomy index. When one or more readers are active concurrently with the
     /// writer, care must be taken to avoid an inconsistency between the state of
@@ -41,24 +43,25 @@ namespace Lucene.Net.Facet.Taxonomy
     /// be committed to disk *before* the main index, because the main index
     /// refers to categories listed in the taxonomy.
     /// Such control can best be achieved by turning off the main index's
-    /// "autocommit" feature, and explicitly calling commit() for both indices
+    /// "autocommit" feature, and explicitly calling <see cref="Index.IndexWriter.Commit"/> for both indices
     /// (first for the taxonomy, then for the main index).
     /// In old versions of Lucene (2.2 or earlier), when autocommit could not be
     /// turned off, a more complicated solution needs to be used. E.g., use
     /// some sort of (possibly inter-process) locking to ensure that a reader
     /// is being opened only right after both indices have been flushed (and
     /// before anything else is written to them).
+    /// </para>
     /// 
     /// @lucene.experimental
     /// </summary>
     public interface ITaxonomyWriter : IDisposable, TwoPhaseCommit
     {
         /// <summary>
-        /// addCategory() adds a category with a given path name to the taxonomy,
+        /// <see cref="AddCategory"/> adds a category with a given path name to the taxonomy,
         /// and returns its ordinal. If the category was already present in
         /// the taxonomy, its existing ordinal is returned.
         /// <P>
-        /// Before adding a category, addCategory() makes sure that all its
+        /// Before adding a category, <see cref="AddCategory"/> makes sure that all its
         /// ancestor categories exist in the taxonomy as well. As result, the
         /// ordinal of a category is guaranteed to be smaller then the ordinal of
         /// any of its descendants. 
@@ -66,37 +69,42 @@ namespace Lucene.Net.Facet.Taxonomy
         int AddCategory(FacetLabel categoryPath);
 
         /// <summary>
-        /// getParent() returns the ordinal of the parent category of the category
+        /// <see cref="GetParent"/> returns the ordinal of the parent category of the category
         /// with the given ordinal.
-        /// <P>
+        /// <para>
         /// When a category is specified as a path name, finding the path of its
         /// parent is as trivial as dropping the last component of the path.
-        /// getParent() is functionally equivalent to calling getPath() on the
+        /// <see cref="GetParent"/> is functionally equivalent to calling <see cref="TaxonomyReader.GetPath"/> on the
         /// given ordinal, dropping the last component of the path, and then calling
-        /// getOrdinal() to get an ordinal back. 
-        /// <P>
-        /// If the given ordinal is the ROOT_ORDINAL, an INVALID_ORDINAL is returned.
-        /// If the given ordinal is a top-level category, the ROOT_ORDINAL is returned.
+        /// <see cref="TaxonomyReader.GetOrdinal"/> to get an ordinal back.
+        /// </para>
+        /// <para>
+        /// If the given ordinal is the <see cref="TaxonomyReader.ROOT_ORDINAL"/>, an 
+        /// <see cref="TaxonomyReader.INVALID_ORDINAL"/> is returned.
+        /// If the given ordinal is a top-level category, the 
+        /// <see cref="TaxonomyReader.ROOT_ORDINAL"/> is returned.
         /// If an invalid ordinal is given (negative or beyond the last available
-        /// ordinal), an ArrayIndexOutOfBoundsException is thrown. However, it is
-        /// expected that getParent will only be called for ordinals which are
+        /// ordinal), an <see cref="IndexOutOfRangeException"/> is thrown. However, it is
+        /// expected that <see cref="GetParent"/> will only be called for ordinals which are
         /// already known to be in the taxonomy.
-        /// TODO (Facet): instead of a getParent(ordinal) method, consider having a
-        /// <P>
-        /// getCategory(categorypath, prefixlen) which is similar to addCategory
+        /// </para>
+        /// <para>
+        /// TODO (Facet): instead of a <see cref="GetParent(int)">GetParent(ordinal)</see> method, consider having a
+        /// GetCategory(categorypath, prefixlen) which is similar to <see cref="AddCategory"/>
         /// except it doesn't add new categories; This method can be used to get
         /// the ordinals of all prefixes of the given category, and it can use
-        /// exactly the same code and cache used by addCategory() so it means less code.
+        /// exactly the same code and cache used by <see cref="AddCategory"/> so it means less code.
+        /// </para>
         /// </summary>
         int GetParent(int ordinal);
 
         /// <summary>
-        /// getSize() returns the number of categories in the taxonomy.
+        /// <see cref="Count"/> returns the number of categories in the taxonomy.
         /// <P>
         /// Because categories are numbered consecutively starting with 0, it
-        /// means the taxonomy contains ordinals 0 through getSize()-1.
+        /// means the taxonomy contains ordinals 0 through <see cref="Count"/>-1.
         /// <P>
-        /// Note that the number returned by getSize() is often slightly higher
+        /// Note that the number returned by <see cref="Count"/> is often slightly higher
         /// than the number of categories inserted into the taxonomy; This is
         /// because when a category is added to the taxonomy, its ancestors
         /// are also added automatically (including the root, which always get
@@ -106,7 +114,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Sets the commit user data map. That method is considered a transaction and
-        /// will be <seealso cref="#commit() committed"/> even if no other changes were made to
+        /// will be <see cref="Index.IndexWriter.Commit">committed</see> even if no other changes were made to
         /// the writer instance.
         /// <para>
         /// <b>NOTE:</b> the map is cloned internally, therefore altering the map's

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
index 800fda0..c2714c6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
@@ -20,11 +20,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      */
 
     /// <summary>
-    /// Utilities for use of <seealso cref="FacetLabel"/> by <seealso cref="CompactLabelToOrdinal"/>. </summary>
+    /// Utilities for use of <see cref="FacetLabel"/> by <see cref="CompactLabelToOrdinal"/>.
+    /// </summary>
     internal class CategoryPathUtils
     {
         /// <summary>
-        /// Serializes the given <seealso cref="FacetLabel"/> to the <seealso cref="CharBlockArray"/>. </summary>
+        /// Serializes the given <see cref="FacetLabel"/> to the <see cref="CharBlockArray"/>.
+        /// </summary>
         public static void Serialize(FacetLabel cp, CharBlockArray charBlockArray)
         {
             charBlockArray.Append((char)cp.Length);
@@ -41,7 +43,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         /// <summary>
         /// Calculates a hash function of a path that was serialized with
-        /// <seealso cref="#serialize(FacetLabel, CharBlockArray)"/>.
+        /// <see cref="Serialize(FacetLabel, CharBlockArray)"/>.
         /// </summary>
         public static int HashCodeOfSerialized(CharBlockArray charBlockArray, int offset)
         {
@@ -62,8 +64,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Check whether the <seealso cref="FacetLabel"/> is equal to the one serialized in
-        /// <seealso cref="CharBlockArray"/>.
+        /// Check whether the <see cref="FacetLabel"/> is equal to the one serialized in
+        /// <see cref="CharBlockArray"/>.
         /// </summary>
         public static bool EqualsToSerialized(FacetLabel cp, CharBlockArray charBlockArray, int offset)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index 60a0726..a461da2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      */
 
     /// <summary>
-    /// Similar to <seealso cref="StringBuilder"/>, but with a more efficient growing strategy.
+    /// Similar to <see cref="StringBuilder"/>, but with a more efficient growing strategy.
     /// This class uses char array blocks to grow.
     /// 
     /// @lucene.experimental

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index 6cbff1f..a6f6ff8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -20,9 +20,9 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      */
 
     /// <summary>
-    /// <seealso cref="TaxonomyWriterCache"/> using <seealso cref="CompactLabelToOrdinal"/>. Although
+    /// <see cref="ITaxonomyWriterCache"/> using <see cref="CompactLabelToOrdinal"/>. Although
     /// called cache, it maintains in memory all the mappings from category to
-    /// ordinal, relying on that <seealso cref="CompactLabelToOrdinal"/> is an efficient
+    /// ordinal, relying on that <see cref="CompactLabelToOrdinal"/> is an efficient
     /// mapping for this purpose.
     /// 
     /// @lucene.experimental
@@ -37,7 +37,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         private volatile CompactLabelToOrdinal cache;
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public Cl2oTaxonomyWriterCache(int initialCapcity, float loadFactor, int numHashArrays)
         {
             this.cache = new CompactLabelToOrdinal(initialCapcity, loadFactor, numHashArrays);
@@ -106,7 +107,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Returns the number of bytes in memory used by this object. </summary>
+        /// Returns the number of bytes in memory used by this object.
+        /// </summary>
         public virtual int GetMemoryUsage()
         {
             return cache == null ? 0 : cache.GetMemoryUsage();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index b199513..a03f1bb 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -22,7 +22,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      */
 
     /// <summary>
-    /// HashMap to store colliding labels. See <seealso cref="CompactLabelToOrdinal"/> for
+    /// HashMap to store colliding labels. See <see cref="CompactLabelToOrdinal"/> for
     /// details.
     /// 
     /// @lucene.experimental
@@ -75,7 +75,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// How many mappings. </summary>
+        /// How many mappings.
+        /// </summary>
         public virtual int Count
         {
             get
@@ -125,9 +126,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Return the mapping, or {@link
-        ///  LabelToOrdinal#INVALID_ORDINAL} if the label isn't
-        ///  recognized. 
+        /// Return the mapping, or <see cref="LabelToOrdinal.INVALID_ORDINAL"/> 
+        /// if the label isn't recognized. 
         /// </summary>
         public virtual int Get(FacetLabel label, int hash)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 35d3ee5..da3a8fc 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -22,21 +22,21 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      */
 
     /// <summary>
-    /// This is a very efficient LabelToOrdinal implementation that uses a
-    /// CharBlockArray to store all labels and a configurable number of HashArrays to
+    /// This is a very efficient <see cref="LabelToOrdinal"/> implementation that uses a
+    /// <see cref="CharBlockArray"/> to store all labels and a configurable number of <see cref="HashArray"/>s to
     /// reference the labels.
     /// <para>
-    /// Since the HashArrays don't handle collisions, a <seealso cref="CollisionMap"/> is used
+    /// Since the <see cref="HashArray"/>s don't handle collisions, a <see cref="CollisionMap"/> is used
     /// to store the colliding labels.
     /// </para>
     /// <para>
     /// This data structure grows by adding a new HashArray whenever the number of
-    /// collisions in the <seealso cref="CollisionMap"/> exceeds {@code loadFactor} * 
-    /// <seealso cref="#getMaxOrdinal()"/>. Growing also includes reinserting all colliding
-    /// labels into the HashArrays to possibly reduce the number of collisions.
+    /// collisions in the <see cref="CollisionMap"/> exceeds <see cref="loadFactor"/> * 
+    /// <see cref="GetMaxOrdinal()"/>. Growing also includes reinserting all colliding
+    /// labels into the <see cref="HashArray"/>s to possibly reduce the number of collisions.
     /// 
-    /// For setting the {@code loadFactor} see 
-    /// <seealso cref="#CompactLabelToOrdinal(int, float, int)"/>. 
+    /// For setting the <see cref="loadFactor"/> see 
+    /// <see cref="CompactLabelToOrdinal(int, float, int)"/>. 
     /// 
     /// </para>
     /// <para>
@@ -66,7 +66,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         private float loadFactor;
 
         /// <summary>
-        /// How many labels. </summary>
+        /// How many labels. 
+        /// </summary>
         public virtual int SizeOfMap
         {
             get
@@ -80,7 +81,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public CompactLabelToOrdinal(int initialCapacity, float loadFactor, int numHashArrays)
         {
             this.hashArrays = new HashArray[numHashArrays];
@@ -293,7 +295,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Returns index for hash code h. </summary>
+        /// Returns index for hash code h.
+        /// </summary>
         internal static int IndexFor(int h, int length)
         {
             return h & (length - 1);
@@ -389,7 +392,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         /// <summary>
         /// Opens the file and reloads the CompactLabelToOrdinal. The file it expects
-        /// is generated from the <seealso cref="#flush(File)"/> command.
+        /// is generated from the <see cref="Flush(Stream)"/> command.
         /// </summary>
         internal static CompactLabelToOrdinal Open(FileInfo file, float loadFactor, int numHashArrays)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
index c9b36d6..bcb53ad 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
@@ -29,13 +29,14 @@
         protected internal int counter;
 
         /// <summary>
-        /// Returned by <seealso cref="#getOrdinal"/> when the label isn't
-        ///  recognized. 
+        /// Returned by <see cref="GetOrdinal"/> when the label isn't
+        /// recognized. 
         /// </summary>
         public const int INVALID_ORDINAL = -2;
 
         /// <summary>
-        /// Default constructor. </summary>
+        /// Default constructor.
+        /// </summary>
         public LabelToOrdinal()
         {
         }
@@ -62,14 +63,14 @@
 
         /// <summary>
         /// Adds a new label if its not yet in the table.
-        /// Throws an <seealso cref="IllegalArgumentException"/> if the same label with
+        /// Throws an <see cref="System.ArgumentException"/> if the same label with
         /// a different ordinal was previoulsy added to this table.
         /// </summary>
         public abstract void AddLabel(FacetLabel label, int ordinal);
 
         /// <summary>
         /// Returns the ordinal assigned to the given label, 
-        /// or <seealso cref="#INVALID_ORDINAL"/> if the label cannot be found in this table.
+        /// or <see cref="INVALID_ORDINAL"/> if the label cannot be found in this table.
         /// </summary>
         public abstract int GetOrdinal(FacetLabel label);
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index 7152094..3f6c4dc 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -18,7 +18,7 @@
      */
 
     /// <summary>
-    /// LRU <seealso cref="TaxonomyWriterCache"/> - good choice for huge taxonomies.
+    /// LRU <see cref="ITaxonomyWriterCache"/> - good choice for huge taxonomies.
     /// 
     /// @lucene.experimental
     /// </summary>
@@ -47,7 +47,8 @@
         private NameIntCacheLRU cache;
 
         /// <summary>
-        /// Creates this with <seealso cref="LRUType#LRU_HASHED"/> method. </summary>
+        /// Creates this with <see cref="LRUType.LRU_HASHED"/> method.
+        /// </summary>
         public LruTaxonomyWriterCache(int cacheSize)
             : this(cacheSize, LRUType.LRU_HASHED)
         {
@@ -60,7 +61,8 @@
         }
 
         /// <summary>
-        /// Creates this with the specified method. </summary>
+        /// Creates this with the specified method.
+        /// </summary>
         public LruTaxonomyWriterCache(int cacheSize, LRUType lruType)
         {
             // TODO (Facet): choose between NameHashIntCacheLRU and NameIntCacheLRU.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
index ca0011a..b7264a0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
@@ -26,8 +26,10 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// 
     /// @lucene.experimental
     /// </summary>
-    // Note: Nothing in this class is synchronized. The caller is assumed to be
-    // synchronized so that no two methods of this class are called concurrently.
+    /// <remarks>
+    /// Note: Nothing in this class is synchronized. The caller is assumed to be
+    /// synchronized so that no two methods of this class are called concurrently.
+    /// </remarks>
     public class NameIntCacheLRU
     {
         private Dictionary<object, int?> cache;
@@ -42,7 +44,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Maximum number of cache entries before eviction. </summary>
+        /// Maximum number of cache entries before eviction.
+        /// </summary>
         public virtual int Capacity
         {
             get
@@ -52,7 +55,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Number of entries currently in the cache. </summary>
+        /// Number of entries currently in the cache.
+        /// </summary>
         public virtual int Count
         {
             get
@@ -91,7 +95,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         /// <summary>
-        /// Subclasses can override this to provide caching by e.g. hash of the string. </summary>
+        /// Subclasses can override this to provide caching by e.g. hash of the string.
+        /// </summary>
         internal virtual object Key(FacetLabel name)
         {
             return name;
@@ -140,7 +145,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// If cache is full remove least recently used entries from cache. Return true
         /// if anything was removed, false otherwise.
         /// 
-        /// See comment in DirectoryTaxonomyWriter.addToCache(CategoryPath, int) for an
+        /// See comment in <see cref="Directory.DirectoryTaxonomyWriter.AddToCache(FacetLabel, int)"/> for an
         /// explanation why we clean 2/3rds of the cache, and not just one entry.
         /// </summary>
         internal virtual bool MakeRoomLRU()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index 1dc4d64..bb2eec8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -22,22 +22,22 @@
     /// <summary>
     /// ITaxonomyWriterCache is a relatively simple interface for a cache of
     /// category->ordinal mappings, used in ITaxonomyWriter implementations (such as
-    /// <seealso cref="DirectoryTaxonomyWriter"/>).
+    /// <see cref="DirectoryTaxonomyWriter"/>).
     /// <para>
-    /// It basically has put() methods for adding a mapping, and get() for looking a
-    /// mapping up the cache. The cache does <B>not</B> guarantee to hold everything
+    /// It basically has <see cref="Put"/> methods for adding a mapping, and <see cref="Get"/> for looking a
+    /// mapping up the cache. The cache does <b>not</b> guarantee to hold everything
     /// that has been put into it, and might in fact selectively delete some of the
-    /// mappings (e.g., the ones least recently used). This means that if get()
+    /// mappings (e.g., the ones least recently used). This means that if <see cref="Get"/>
     /// returns a negative response, it does not necessarily mean that the category
     /// doesn't exist - just that it is not in the cache. The caller can only infer
     /// that the category doesn't exist if it knows the cache to be complete (because
-    /// all the categories were loaded into the cache, and since then no put()
+    /// all the categories were loaded into the cache, and since then no <see cref="Put"/>
     /// returned true).
     /// </para>
     /// <para>
     /// However, if it does so, it should clear out large parts of the cache at once,
     /// because the user will typically need to work hard to recover from every cache
-    /// cleanup (see <seealso cref="#put(FacetLabel, int)"/>'s return value).
+    /// cleanup (see <see cref="Put(FacetLabel, int)"/>'s return value).
     /// </para>
     /// <para>
     /// <b>NOTE:</b> the cache may be accessed concurrently by multiple threads,
@@ -49,7 +49,7 @@
     public interface ITaxonomyWriterCache
     {
         /// <summary>
-        /// Let go of whatever resources the cache is holding. After a close(),
+        /// Let go of whatever resources the cache is holding. After a <see cref="Close()"/>,
         /// this object can no longer be used.
         /// </summary>
         void Close();
@@ -57,43 +57,47 @@
         /// <summary>
         /// Lookup a category in the cache, returning its ordinal, or a negative
         /// number if the category is not in the cache.
-        /// <P>
+        /// <para>
         /// It is up to the caller to remember what a negative response means:
-        /// If the caller knows the cache is <I>complete</I> (it was initially
-        /// fed with all the categories, and since then put() never returned true)
+        /// If the caller knows the cache is <i>complete</i> (it was initially
+        /// fed with all the categories, and since then <see cref="Put"/> never returned true)
         /// it means the category does not exist. Otherwise, the category might
         /// still exist, but just be missing from the cache.
+        /// </para>
         /// </summary>
         int Get(FacetLabel categoryPath);
 
         /// <summary>
         /// Add a category to the cache, with the given ordinal as the value.
-        /// <P>
+        /// <para>
         /// If the implementation keeps only a partial cache (e.g., an LRU cache)
         /// and finds that its cache is full, it should clear up part of the cache
-        /// and return <code>true</code>. Otherwise, it should return
-        /// <code>false</code>.
-        /// <P>
+        /// and return <c>true</c>. Otherwise, it should return
+        /// <c>false</c>.
+        /// </para>
+        /// <para>
         /// The reason why the caller needs to know if part of the cache was
         /// cleared is that in that case it will have to commit its on-disk index
         /// (so that all the latest category additions can be searched on disk, if
         /// we can't rely on the cache to contain them).
-        /// <P>
+        /// </para>
+        /// <para>
         /// Ordinals should be non-negative. Currently there is no defined way to
         /// specify that a cache should remember a category does NOT exist.
         /// It doesn't really matter, because normally the next thing we do after
         /// finding that a category does not exist is to add it.
+        /// </para>
         /// </summary>
         bool Put(FacetLabel categoryPath, int ordinal);
 
         /// <summary>
-        /// Returns true if the cache is full, such that the next <seealso cref="#put"/> will
+        /// Returns true if the cache is full, such that the next <see cref="Put"/> will
         /// evict entries from it, false otherwise.
         /// </summary>
         bool IsFull { get; }
 
         /// <summary>
-        /// Clears the content of the cache. Unlike <seealso cref="#close()"/>, the caller can
+        /// Clears the content of the cache. Unlike <see cref="Close()"/>, the caller can
         /// assume that the cache is still operable after this method returns.
         /// </summary>
         void Clear();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
index 9c81f31..756bd94 100644
--- a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
@@ -21,12 +21,13 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Keeps highest results, first by largest float value,
-    ///  then tie break by smallest ord. 
+    /// then tie break by smallest ord. 
     /// </summary>
     public class TopOrdAndFloatQueue : PriorityQueue<TopOrdAndFloatQueue.OrdAndValue>
     {
         /// <summary>
-        /// Holds a single entry. </summary>
+        /// Holds a single entry.
+        /// </summary>
         public sealed class OrdAndValue
         {
             /// <summary>
@@ -38,14 +39,16 @@ namespace Lucene.Net.Facet
             public float Value { get; set; }
 
             /// <summary>
-            /// Default constructor. </summary>
+            /// Default constructor.
+            /// </summary>
             public OrdAndValue()
             {
             }
         }
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public TopOrdAndFloatQueue(int topN) : base(topN, false)
         {
         }
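
The summary "first by largest float value, then tie break by smallest ord" implies the queue's ordering predicate. A hedged sketch of what that comparison presumably looks like (the override itself sits outside this hunk, and 'Ord' is assumed to be the companion property of 'Value' on OrdAndValue):

    // An entry is "less" -- evicted first -- when its value is smaller, or, on
    // equal values, when its ord is larger, so the smallest ord wins ties.
    protected override bool LessThan(OrdAndValue a, OrdAndValue b)
    {
        if (a.Value < b.Value) return true;
        if (a.Value > b.Value) return false;
        return a.Ord > b.Ord;
    }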

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
index 7016e80..d9fbbfe 100644
--- a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
@@ -21,12 +21,13 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Keeps highest results, first by largest int value,
-    ///  then tie break by smallest ord. 
+    /// then tie break by smallest ord. 
     /// </summary>
     public class TopOrdAndIntQueue : PriorityQueue<TopOrdAndIntQueue.OrdAndValue>
     {
         /// <summary>
-        /// Holds a single entry. </summary>
+        /// Holds a single entry.
+        /// </summary>
         public sealed class OrdAndValue
         {
             /// <summary>
@@ -38,14 +39,16 @@ namespace Lucene.Net.Facet
             public int Value { get; set; }
 
             /// <summary>
-            /// Default constructor. </summary>
+            /// Default constructor.
+            /// </summary>
             public OrdAndValue()
             {
             }
         }
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public TopOrdAndIntQueue(int topN)
             : base(topN, false)
         {


[33/46] lucenenet git commit: Renamed Facet.Taxonomy.TaxonomyReader.Size to Count (.NETified)

Posted by sy...@apache.org.
Renamed Facet.Taxonomy.TaxonomyReader.Size to Count (.NETified)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/66e39325
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/66e39325
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/66e39325

Branch: refs/heads/master
Commit: 66e393255818d5475014af3cc9e869e6079dbfc9
Parents: fd13e8e
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 16:11:50 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:53 2016 +0700

----------------------------------------------------------------------
 .../Directory/DirectoryTaxonomyReader.cs        |  2 +-
 .../Taxonomy/FloatTaxonomyFacets.cs             |  4 +-
 .../Taxonomy/IntTaxonomyFacets.cs               |  4 +-
 .../Taxonomy/PrintTaxonomyStats.cs              |  2 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  3 +-
 .../Taxonomy/Directory/TestAddTaxonomy.cs       |  8 +--
 .../Directory/TestConcurrentFacetedIndexing.cs  |  2 +-
 .../Directory/TestDirectoryTaxonomyReader.cs    | 18 +++---
 .../Directory/TestDirectoryTaxonomyWriter.cs    |  2 +-
 .../Taxonomy/TestSearcherTaxonomyManager.cs     |  4 +-
 .../Taxonomy/TestTaxonomyCombined.cs            | 68 ++++++++++----------
 11 files changed, 58 insertions(+), 59 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index e540a00..98bd09e 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -369,7 +369,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             return res;
         }
 
-        public override int Size
+        public override int Count
         {
             get
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index f2e79a5..27c8eff 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Facet.Taxonomy
         protected internal FloatTaxonomyFacets(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config)
             : base(indexFieldName, taxoReader, config)
         {
-            values = new float[taxoReader.Size];
+            values = new float[taxoReader.Count];
         }
 
         /// <summary>
@@ -112,7 +112,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 return null;
             }
 
-            TopOrdAndFloatQueue q = new TopOrdAndFloatQueue(Math.Min(taxoReader.Size, topN));
+            TopOrdAndFloatQueue q = new TopOrdAndFloatQueue(Math.Min(taxoReader.Count, topN));
             float bottomValue = 0;
 
             int ord = children[dimOrd];

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index c9a789b..2c4e533 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Facet.Taxonomy
         protected internal IntTaxonomyFacets(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config)
             : base(indexFieldName, taxoReader, config)
         {
-            values = new int[taxoReader.Size];
+            values = new int[taxoReader.Count];
         }
 
         /// <summary>
@@ -116,7 +116,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 return null;
             }
 
-            TopOrdAndIntQueue q = new TopOrdAndIntQueue(Math.Min(taxoReader.Size, topN));
+            TopOrdAndIntQueue q = new TopOrdAndIntQueue(Math.Min(taxoReader.Count, topN));
 
             int bottomValue = 0;
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
index e4b723b..601e5d8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
@@ -70,7 +70,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Recursively prints stats for all ordinals. </summary>
         public static void PrintStats(TaxonomyReader r, TextWriter @out, bool printTree)
         {
-            @out.WriteLine(r.Size + " total categories.");
+            @out.WriteLine(r.Count + " total categories.");
 
             ChildrenIterator it = r.GetChildren(TaxonomyReader.ROOT_ORDINAL);
             int child;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index e99b011..db56dc2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -265,7 +265,6 @@ namespace Lucene.Net.Facet.Taxonomy
             }
         }
 
-        // LUCENENET TODO: Rename to Count (.NETify)
         /// <summary>
         /// Returns the number of categories in the taxonomy. Note that the number of
         /// categories returned is often slightly higher than the number of categories
@@ -273,7 +272,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// taxonomy, its ancestors are also added automatically (including the root,
         /// which always get ordinal 0).
         /// </summary>
-        public abstract int Size { get; }
+        public abstract int Count { get; }
 
         /// <summary>
         /// Expert: increments the refCount of this TaxonomyReader instance. RefCounts
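
For callers the rename is mechanical (every taxoReader.Size becomes taxoReader.Count, as the test diffs below show). What Count reports deserves one concrete illustration, since ancestors and the root are included. A hedged sketch, assuming 'taxoReader' is an open reader over a taxonomy to which only "Author/Mark Twain" was added:

    // Adding "Author/Mark Twain" to an empty taxonomy also adds its ancestor
    // "Author", and the root always holds ordinal 0, so Count reports 3.
    int count = taxoReader.Count;                      // 3
    for (int ord = 0; ord < count; ord++)
    {
        FacetLabel path = taxoReader.GetPath(ord);     // ordinal => category
        int roundTrip = taxoReader.GetOrdinal(path);   // category => same ordinal
    }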

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
index 9482407..0ed2bb4 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
@@ -125,14 +125,14 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             var destTr = new DirectoryTaxonomyReader(dest);
             try
             {
-                int destSize = destTr.Size;
+                int destSize = destTr.Count;
                 var srcTR = new DirectoryTaxonomyReader(src);
                 try
                 {
                     var map = ordMap.Map;
 
                     // validate taxo sizes
-                    int srcSize = srcTR.Size;
+                    int srcSize = srcTR.Count;
                     Assert.True(destSize >= srcSize, "destination taxonomy expected to be larger than source; dest=" + destSize + " src=" + srcSize);
 
                     // validate that all source categories exist in destination, and their
@@ -275,9 +275,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             var dtr = new DirectoryTaxonomyReader(dest);
             // +2 to account for the root category + "a"
-            Assert.AreEqual(numCategories + 2, dtr.Size);
+            Assert.AreEqual(numCategories + 2, dtr.Count);
             var categories = new HashSet<FacetLabel>();
-            for (int i = 1; i < dtr.Size; i++)
+            for (int i = 1; i < dtr.Count; i++)
             {
                 FacetLabel cat = dtr.GetPath(i);
                 Assert.True(categories.Add(cat), "category " + cat + " already existed");

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index 49df670..abd82f8 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -136,7 +136,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             var tr = new DirectoryTaxonomyReader(tw);
             // +1 for root category
-            if (values.Count + 1 != tr.Size)
+            if (values.Count + 1 != tr.Count)
             {
                 foreach (string value in values.Keys)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
index 0e2935a..41ee66f 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
@@ -53,7 +53,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             ltr.Dispose();
 
             // should not fail as we IncRef() before close
-            var tmpSie = ltr.Size;
+            var tmpSie = ltr.Count;
             ltr.DecRef();
 
             dir.Dispose();
@@ -118,7 +118,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             ltr.Dispose();
             try
             {
-                var tmpSize = ltr.Size;
+                var tmpSize = ltr.Count;
                 Fail("An AlreadyClosedException should have been thrown here");
             }
             catch (AlreadyClosedException)
@@ -166,7 +166,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 tw.Dispose();
 
                 tr = new DirectoryTaxonomyReader(dir);
-                int baseNumCategories = tr.Size;
+                int baseNumCategories = tr.Count;
 
                 for (int i = 0; i < n; i++)
                 {
@@ -189,7 +189,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                         tr.Dispose(true);
                         tr = newtr;
                     }
-                    Assert.AreEqual(baseNumCategories + 1 + k, tr.Size, "Wrong #categories in taxonomy (i=" + i + ", k=" + k + ")");
+                    Assert.AreEqual(baseNumCategories + 1 + k, tr.Count, "Wrong #categories in taxonomy (i=" + i + ", k=" + k + ")");
                 }
             }
             finally
@@ -251,7 +251,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 reader = newtr;
 
                 // assert categories
-                Assert.AreEqual(numCategories, reader.Size);
+                Assert.AreEqual(numCategories, reader.Count);
                 int roundOrdinal = reader.GetOrdinal(new FacetLabel(Convert.ToString(i)));
                 int[] parents = reader.ParallelTaxonomyArrays.Parents;
                 Assert.AreEqual(0, parents[roundOrdinal]); // round's parent is root
@@ -305,7 +305,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             var writer = new DirectoryTaxonomyWriterAnonymousInnerClassHelper2(dir);
 
             var reader = new DirectoryTaxonomyReader(writer);
-            Assert.AreEqual(1, reader.Size);
+            Assert.AreEqual(1, reader.Count);
             Assert.AreEqual(1, reader.ParallelTaxonomyArrays.Parents.Length);
 
             // add category and call forceMerge -- this should flush IW and merge segments down to 1
@@ -318,7 +318,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             Assert.NotNull(newtr);
             reader.Dispose();
             reader = newtr;
-            Assert.AreEqual(2, reader.Size);
+            Assert.AreEqual(2, reader.Count);
             Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents.Length);
 
             reader.Dispose();
@@ -366,7 +366,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             writer.AddCategory(new FacetLabel("a"));
 
             var reader = new DirectoryTaxonomyReader(writer);
-            Assert.AreEqual(2, reader.Size);
+            Assert.AreEqual(2, reader.Count);
             Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents.Length);
 
             // merge all the segments so that NRT reader thinks there's a change 
@@ -377,7 +377,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             Assert.NotNull(newtr);
             reader.Dispose();
             reader = newtr;
-            Assert.AreEqual(2, reader.Size);
+            Assert.AreEqual(2, reader.Count);
             Assert.AreEqual(2, reader.ParallelTaxonomyArrays.Parents.Length);
 
             reader.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index 5e4ec75..bd32d53 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -312,7 +312,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             DirectoryTaxonomyReader dtr = new DirectoryTaxonomyReader(dir);
             // +1 for root category
-            if (values.Count + 1 != dtr.Size)
+            if (values.Count + 1 != dtr.Count)
             {
                 foreach (string value in values.Keys)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
index 79f8b80..257ec1b 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
@@ -365,7 +365,7 @@ namespace Lucene.Net.Facet.Taxonomy
             SearcherAndTaxonomy pair = mgr.Acquire();
             try
             {
-                Assert.AreEqual(1, pair.TaxonomyReader.Size);
+                Assert.AreEqual(1, pair.TaxonomyReader.Count);
             }
             finally
             {
@@ -382,7 +382,7 @@ namespace Lucene.Net.Facet.Taxonomy
             pair = mgr.Acquire();
             try
             {
-                Assert.AreEqual(3, pair.TaxonomyReader.Size);
+                Assert.AreEqual(3, pair.TaxonomyReader.Count);
             }
             finally
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66e39325/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index 70080ec..45eeb3b 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -310,7 +310,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(1, tw.Size);
             tw.Dispose();
             var tr = new DirectoryTaxonomyReader(indexDir);
-            Assert.AreEqual(1, tr.Size);
+            Assert.AreEqual(1, tr.Count);
             Assert.AreEqual(0, tr.GetPath(0).Length);
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.ParallelTaxonomyArrays.Parents[0]);
             Assert.AreEqual(0, tr.GetOrdinal(new FacetLabel()));
@@ -331,7 +331,7 @@ namespace Lucene.Net.Facet.Taxonomy
             var tw = new DirectoryTaxonomyWriter(indexDir);
             tw.Commit();
             var tr = new DirectoryTaxonomyReader(indexDir);
-            Assert.AreEqual(1, tr.Size);
+            Assert.AreEqual(1, tr.Count);
             Assert.AreEqual(0, tr.GetPath(0).Length);
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.ParallelTaxonomyArrays.Parents[0]);
             Assert.AreEqual(0, tr.GetOrdinal(new FacetLabel()));
@@ -356,16 +356,16 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
 
             // test TaxonomyReader.getSize():
-            Assert.AreEqual(ExpectedCategories.Length, tr.Size);
+            Assert.AreEqual(ExpectedCategories.Length, tr.Count);
 
             // test round trips of ordinal => category => ordinal
-            for (int i = 0; i < tr.Size; i++)
+            for (int i = 0; i < tr.Count; i++)
             {
                 Assert.AreEqual(i, tr.GetOrdinal(tr.GetPath(i)));
             }
 
             // test TaxonomyReader.getCategory():
-            for (int i = 1; i < tr.Size; i++)
+            for (int i = 1; i < tr.Count; i++)
             {
                 FacetLabel expectedCategory = new FacetLabel(ExpectedCategories[i]);
                 FacetLabel category = tr.GetPath(i);
@@ -376,7 +376,7 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             //  (also test invalid ordinals:)
             Assert.Null(tr.GetPath(-1));
-            Assert.Null(tr.GetPath(tr.Size));
+            Assert.Null(tr.GetPath(tr.Count));
             Assert.Null(tr.GetPath(TaxonomyReader.INVALID_ORDINAL));
 
             // test TaxonomyReader.GetOrdinal():
@@ -424,7 +424,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, parents[0]);
 
             // check parent of non-root ordinals:
-            for (int ordinal = 1; ordinal < tr.Size; ordinal++)
+            for (int ordinal = 1; ordinal < tr.Count; ordinal++)
             {
                 FacetLabel me = tr.GetPath(ordinal);
                 int parentOrdinal = parents[ordinal];
@@ -496,7 +496,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tw.GetParent(0));
 
             // check parent of non-root ordinals:
-            for (int ordinal = 1; ordinal < tr.Size; ordinal++)
+            for (int ordinal = 1; ordinal < tr.Count; ordinal++)
             {
                 FacetLabel me = tr.GetPath(ordinal);
                 int parentOrdinal = tw.GetParent(ordinal);
@@ -534,7 +534,7 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             try
             {
-                int parent = tw.GetParent(tr.Size);
+                int parent = tw.GetParent(tr.Count);
                 Fail("getParent for getSize() should throw exception, but returned " + parent);
             }
             catch (System.IndexOutOfRangeException)
@@ -558,9 +558,9 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
             ParallelTaxonomyArrays ca = tr.ParallelTaxonomyArrays;
             int[] youngestChildArray = ca.Children;
-            Assert.AreEqual(tr.Size, youngestChildArray.Length);
+            Assert.AreEqual(tr.Count, youngestChildArray.Length);
             int[] olderSiblingArray = ca.Siblings;
-            Assert.AreEqual(tr.Size, olderSiblingArray.Length);
+            Assert.AreEqual(tr.Count, olderSiblingArray.Length);
             for (int i = 0; i < ExpectedCategories.Length; i++)
             {
                 // find expected children by looking at all expectedCategories
@@ -631,13 +631,13 @@ namespace Lucene.Net.Facet.Taxonomy
             var tr = new DirectoryTaxonomyReader(indexDir);
             ParallelTaxonomyArrays ca = tr.ParallelTaxonomyArrays;
             int[] children = ca.Children;
-            Assert.AreEqual(tr.Size, children.Length);
+            Assert.AreEqual(tr.Count, children.Length);
             int[] olderSiblingArray = ca.Siblings;
-            Assert.AreEqual(tr.Size, olderSiblingArray.Length);
+            Assert.AreEqual(tr.Count, olderSiblingArray.Length);
 
             // test that the "youngest child" of every category is indeed a child:
             int[] parents = tr.ParallelTaxonomyArrays.Parents;
-            for (int i = 0; i < tr.Size; i++)
+            for (int i = 0; i < tr.Count; i++)
             {
                 int youngestChild = children[i];
                 if (youngestChild != TaxonomyReader.INVALID_ORDINAL)
@@ -648,14 +648,14 @@ namespace Lucene.Net.Facet.Taxonomy
 
             // test that the "older sibling" of every category is indeed older (lower)
             // (it can also be INVALID_ORDINAL, which is lower than any ordinal)
-            for (int i = 0; i < tr.Size; i++)
+            for (int i = 0; i < tr.Count; i++)
             {
                 Assert.True(olderSiblingArray[i] < i, "olderSiblingArray[" + i + "] should be <" + i);
             }
 
             // test that the "older sibling" of every category is indeed a sibling
             // (they share the same parent)
-            for (int i = 0; i < tr.Size; i++)
+            for (int i = 0; i < tr.Count; i++)
             {
                 int sibling = olderSiblingArray[i];
                 if (sibling == TaxonomyReader.INVALID_ORDINAL)
@@ -670,11 +670,11 @@ namespace Lucene.Net.Facet.Taxonomy
 
             // test that the "youngest child" is indeed the youngest (so we don't
             // miss the first children in the chain)
-            for (int i = 0; i < tr.Size; i++)
+            for (int i = 0; i < tr.Count; i++)
             {
                 // Find the really youngest child:
                 int j;
-                for (j = tr.Size - 1; j > i; j--)
+                for (j = tr.Count - 1; j > i; j--)
                 {
                     if (parents[j] == i)
                     {
@@ -691,7 +691,7 @@ namespace Lucene.Net.Facet.Taxonomy
             // test that the "older sibling" is indeed the least oldest one - and
             // not a too old one or -1 (so we didn't miss some children in the
             // middle or the end of the chain).
-            for (int i = 0; i < tr.Size; i++)
+            for (int i = 0; i < tr.Count; i++)
             {
                 // Find the youngest older sibling:
                 int j;
@@ -725,7 +725,7 @@ namespace Lucene.Net.Facet.Taxonomy
             tw.Commit();
             var tr = new DirectoryTaxonomyReader(indexDir);
             ParallelTaxonomyArrays ca = tr.ParallelTaxonomyArrays;
-            Assert.AreEqual(3, tr.Size);
+            Assert.AreEqual(3, tr.Count);
             Assert.AreEqual(3, ca.Siblings.Length);
             Assert.AreEqual(3, ca.Children.Length);
             Assert.True(Arrays.Equals(new int[] { 1, 2, -1 }, ca.Children));
@@ -736,7 +736,7 @@ namespace Lucene.Net.Facet.Taxonomy
             // Before refresh, nothing changed..
             ParallelTaxonomyArrays newca = tr.ParallelTaxonomyArrays;
             Assert.AreSame(newca, ca); // we got exactly the same object
-            Assert.AreEqual(3, tr.Size);
+            Assert.AreEqual(3, tr.Count);
             Assert.AreEqual(3, ca.Siblings.Length);
             Assert.AreEqual(3, ca.Children.Length);
             // After the refresh, things change:
@@ -745,7 +745,7 @@ namespace Lucene.Net.Facet.Taxonomy
             tr.Dispose();
             tr = newtr;
             ca = tr.ParallelTaxonomyArrays;
-            Assert.AreEqual(5, tr.Size);
+            Assert.AreEqual(5, tr.Count);
             Assert.AreEqual(5, ca.Siblings.Length);
             Assert.AreEqual(5, ca.Children.Length);
             Assert.True(Arrays.Equals(new int[] { 4, 3, -1, -1, -1 }, ca.Children));
@@ -934,13 +934,13 @@ namespace Lucene.Net.Facet.Taxonomy
             tw.Commit();
             var tr = new DirectoryTaxonomyReader(indexDir);
 
-            Assert.AreEqual(1, tr.Size); // the empty taxonomy has size 1 (the root)
+            Assert.AreEqual(1, tr.Count); // the empty taxonomy has size 1 (the root)
             tw.AddCategory(new FacetLabel("Author"));
-            Assert.AreEqual(1, tr.Size); // still root only...
+            Assert.AreEqual(1, tr.Count); // still root only...
             Assert.Null(TaxonomyReader.OpenIfChanged(tr)); // this is not enough, because tw.Commit() hasn't been done yet
-            Assert.AreEqual(1, tr.Size); // still root only...
+            Assert.AreEqual(1, tr.Count); // still root only...
             tw.Commit();
-            Assert.AreEqual(1, tr.Size); // still root only...
+            Assert.AreEqual(1, tr.Count); // still root only...
             var newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
             Assert.NotNull(newTaxoReader);
             tr.Dispose();
@@ -956,7 +956,7 @@ namespace Lucene.Net.Facet.Taxonomy
             {
                 Fail("After category addition, commit() and refresh(), getParent for " + author + " should NOT throw exception");
             }
-            Assert.AreEqual(2, tr.Size); // finally, see there are two categories
+            Assert.AreEqual(2, tr.Count); // finally, see there are two categories
 
             // now, add another category, and verify that after commit and refresh
             // the parent of this category is correct (this requires the reader
@@ -973,7 +973,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(author, parents[dawkins]);
             Assert.AreEqual(TaxonomyReader.ROOT_ORDINAL, parents[author]);
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, parents[TaxonomyReader.ROOT_ORDINAL]);
-            Assert.AreEqual(3, tr.Size);
+            Assert.AreEqual(3, tr.Count);
             tw.Dispose();
             tr.Dispose();
             indexDir.Dispose();
@@ -990,25 +990,25 @@ namespace Lucene.Net.Facet.Taxonomy
             // Test getOrdinal():
             FacetLabel author = new FacetLabel("Author");
 
-            Assert.AreEqual(1, tr.Size); // the empty taxonomy has size 1 (the root)
+            Assert.AreEqual(1, tr.Count); // the empty taxonomy has size 1 (the root)
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
             tw.AddCategory(author);
             // before commit and refresh, no change:
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
-            Assert.AreEqual(1, tr.Size); // still root only...
+            Assert.AreEqual(1, tr.Count); // still root only...
             Assert.Null(TaxonomyReader.OpenIfChanged(tr)); // this is not enough, because tw.Commit() hasn't been done yet
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
-            Assert.AreEqual(1, tr.Size); // still root only...
+            Assert.AreEqual(1, tr.Count); // still root only...
             tw.Commit();
             // still not enough before refresh:
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
-            Assert.AreEqual(1, tr.Size); // still root only...
+            Assert.AreEqual(1, tr.Count); // still root only...
             var newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
             Assert.NotNull(newTaxoReader);
             tr.Dispose();
             tr = newTaxoReader;
             Assert.AreEqual(1, tr.GetOrdinal(author));
-            Assert.AreEqual(2, tr.Size);
+            Assert.AreEqual(2, tr.Count);
             tw.Dispose();
             tr.Dispose();
             indexDir.Dispose();
@@ -1166,7 +1166,7 @@ namespace Lucene.Net.Facet.Taxonomy
             writer.AddCategory(cp);
             var newReader = TaxonomyReader.OpenIfChanged(reader);
             Assert.NotNull(newReader, "expected a new instance");
-            Assert.AreEqual(2, newReader.Size);
+            Assert.AreEqual(2, newReader.Count);
             Assert.AreNotSame(TaxonomyReader.INVALID_ORDINAL, newReader.GetOrdinal(cp));
             reader.Dispose();
             reader = newReader;


[21/46] lucenenet git commit: Fixed the "_Renamed" variable names in Facet.Taxonomy.Directory.TaxonomyIndexArrays.

Posted by sy...@apache.org.
Fixed the "_Renamed" variable names in Facet.Taxonomy.Directory.TaxonomyIndexArrays.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/8d23d138
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/8d23d138
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/8d23d138

Branch: refs/heads/master
Commit: 8d23d138ac51e8a73d87ab13b2cb657d06263f87
Parents: e4b7e0b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 13:22:53 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:21 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/Directory/TaxonomyIndexArrays.cs   | 52 ++++++++++----------
 1 file changed, 26 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8d23d138/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
index 2319550..ec0b33a 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
@@ -35,26 +35,26 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     /// </summary>
     internal class TaxonomyIndexArrays : ParallelTaxonomyArrays
     {
-        private readonly int[] parents_Renamed;
+        private readonly int[] parents;
 
         // the following two arrays are lazily intialized. note that we only keep a
         // single boolean member as volatile, instead of declaring the arrays
         // volatile. the code guarantees that only after the boolean is set to true,
         // the arrays are returned.
         private volatile bool initializedChildren = false;
-        private int[] children_Renamed, siblings_Renamed;
+        private int[] children, siblings;
 
         /// <summary>
         /// Used by <seealso cref="#add(int, int)"/> after the array grew. </summary>
         private TaxonomyIndexArrays(int[] parents)
         {
-            this.parents_Renamed = parents;
+            this.parents = parents;
         }
 
         public TaxonomyIndexArrays(IndexReader reader)
         {
-            parents_Renamed = new int[reader.MaxDoc];
-            if (parents_Renamed.Length > 0)
+            parents = new int[reader.MaxDoc];
+            if (parents.Length > 0)
             {
                 InitParents(reader, 0);
                 // Starting Lucene 2.9, following the change LUCENE-1542, we can
@@ -64,7 +64,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 // with existing indexes, so what we'll do instead is just
                 // hard-code the parent of ordinal 0 to be -1, and assume (as is
                 // indeed the case) that no other parent can be -1.
-                parents_Renamed[0] = TaxonomyReader.INVALID_ORDINAL;
+                parents[0] = TaxonomyReader.INVALID_ORDINAL;
             }
         }
 
@@ -77,8 +77,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             // NRT reader was obtained, even though nothing was changed. this is not very likely
             // to happen.
             int[] copyParents = copyFrom.Parents();
-            this.parents_Renamed = new int[reader.MaxDoc];
-            Array.Copy(copyParents, 0, parents_Renamed, 0, copyParents.Length);
+            this.parents = new int[reader.MaxDoc];
+            Array.Copy(copyParents, 0, parents, 0, copyParents.Length);
             InitParents(reader, copyParents.Length);
 
             if (copyFrom.initializedChildren)
@@ -93,14 +93,14 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             {
                 if (!initializedChildren) // must do this check !
                 {
-                    children_Renamed = new int[parents_Renamed.Length];
-                    siblings_Renamed = new int[parents_Renamed.Length];
+                    children = new int[parents.Length];
+                    siblings = new int[parents.Length];
                     if (copyFrom != null)
                     {
                         // called from the ctor, after we know copyFrom has initialized children/siblings
-                        Array.Copy(copyFrom.Children(), 0, children_Renamed, 0, copyFrom.Children().Length);
-                        Array.Copy(copyFrom.Siblings(), 0, siblings_Renamed, 0, copyFrom.Siblings().Length);
-                        ComputeChildrenSiblings(copyFrom.parents_Renamed.Length);
+                        Array.Copy(copyFrom.Children(), 0, children, 0, copyFrom.Children().Length);
+                        Array.Copy(copyFrom.Siblings(), 0, siblings, 0, copyFrom.Siblings().Length);
+                        ComputeChildrenSiblings(copyFrom.parents.Length);
                     }
                     else
                     {
@@ -116,24 +116,24 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             // reset the youngest child of all ordinals. while this should be done only
             // for the leaves, we don't know up front which are the leaves, so we reset
             // all of them.
-            for (int i = first; i < parents_Renamed.Length; i++)
+            for (int i = first; i < parents.Length; i++)
             {
-                children_Renamed[i] = TaxonomyReader.INVALID_ORDINAL;
+                children[i] = TaxonomyReader.INVALID_ORDINAL;
             }
 
             // the root category has no parent, and therefore no siblings
             if (first == 0)
             {
                 first = 1;
-                siblings_Renamed[0] = TaxonomyReader.INVALID_ORDINAL;
+                siblings[0] = TaxonomyReader.INVALID_ORDINAL;
             }
 
-            for (int i = first; i < parents_Renamed.Length; i++)
+            for (int i = first; i < parents.Length; i++)
             {
                 // note that parents[i] is always < i, so the right-hand-side of
                 // the following line is already set when we get here
-                siblings_Renamed[i] = children_Renamed[parents_Renamed[i]];
-                children_Renamed[parents_Renamed[i]] = i;
+                siblings[i] = children[parents[i]];
+                children[parents[i]] = i;
             }
         }
 
@@ -166,7 +166,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                         throw new CorruptIndexException("Missing parent data for category " + i);
                     }
 
-                    parents_Renamed[i] = positions.NextPosition();
+                    parents[i] = positions.NextPosition();
 
                     if (positions.NextDoc() == DocIdSetIterator.NO_MORE_DOCS)
                     {
@@ -193,13 +193,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// </summary>
         internal virtual TaxonomyIndexArrays Add(int ordinal, int parentOrdinal)
         {
-            if (ordinal >= parents_Renamed.Length)
+            if (ordinal >= parents.Length)
             {
-                int[] newarray = ArrayUtil.Grow(parents_Renamed, ordinal + 1);
+                int[] newarray = ArrayUtil.Grow(parents, ordinal + 1);
                 newarray[ordinal] = parentOrdinal;
                 return new TaxonomyIndexArrays(newarray);
             }
-            parents_Renamed[ordinal] = parentOrdinal;
+            parents[ordinal] = parentOrdinal;
             return this;
         }
 
@@ -209,7 +209,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// </summary>
         public override int[] Parents()
         {
-            return parents_Renamed;
+            return parents;
         }
 
         /// <summary>
@@ -226,7 +226,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
 
             // the array is guaranteed to be populated
-            return children_Renamed;
+            return children;
         }
 
         /// <summary>
@@ -242,7 +242,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
 
             // the array is guaranteed to be populated
-            return siblings_Renamed;
+            return siblings;
         }
     }
 }
\ No newline at end of file
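
The ComputeChildrenSiblings hunk above is easier to follow on a concrete input. A hedged, self-contained trace of the same two-line loop run on a hand-made taxonomy (root 0; categories 1 and 2 under the root; category 3 under 1); INVALID stands in for TaxonomyReader.INVALID_ORDINAL:

    const int INVALID = -1;
    int[] parents  = { INVALID, 0, 0, 1 };
    int[] children = { INVALID, INVALID, INVALID, INVALID };  // reset to "no child yet"
    int[] siblings = new int[4];
    siblings[0] = INVALID;                        // the root has no parent, hence no siblings
    for (int i = 1; i < parents.Length; i++)
    {
        siblings[i] = children[parents[i]];       // previous youngest child becomes the older sibling
        children[parents[i]] = i;                 // i is now the youngest child of its parent
    }
    // children == { 2, 3, INVALID, INVALID }       -> youngest child of each ordinal
    // siblings == { INVALID, INVALID, 1, INVALID } -> next-older sibling of each ordinal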


[05/46] lucenenet git commit: Added reminder to .NETify the Size property.

Posted by sy...@apache.org.
Added reminder to .NETify the Size property.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/602bbccd
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/602bbccd
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/602bbccd

Branch: refs/heads/master
Commit: 602bbccdffeb0dee470220893ad41383d54f68f3
Parents: 08dfc1b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 16:33:54 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:41 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/602bbccd/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index 55c8f27..f5cfc12 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -270,6 +270,7 @@ namespace Lucene.Net.Facet.Taxonomy
             }
         }
 
+        // LUCENENET TODO: Rename to Count (.NETify)
         /// <summary>
         /// Returns the number of categories in the taxonomy. Note that the number of
         /// categories returned is often slightly higher than the number of categories


[29/46] lucenenet git commit: Changed Size() and Capacity() methods of Facet.Taxonomy.WriterCache.CollisionMap to Count and Capacity properties.

Posted by sy...@apache.org.
Changed Size() and Capacity() methods of Facet.Taxonomy.WriterCache.CollisionMap to Count and Capacity properties.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e0d070b5
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e0d070b5
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e0d070b5

Branch: refs/heads/master
Commit: e0d070b5de0a9329fcd4cc41afbdda2a7d6216a6
Parents: cb8d47f
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:46:15 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:43 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/WriterCache/CollisionMap.cs              | 14 ++++++++++----
 .../Taxonomy/WriterCache/CompactLabelToOrdinal.cs     |  8 ++++----
 2 files changed, 14 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0d070b5/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index 1b2767b..b199513 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -76,17 +76,23 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         /// <summary>
         /// How many mappings. </summary>
-        public virtual int Size()
+        public virtual int Count
         {
-            return this.size;
+            get
+            {
+                return this.size;
+            }
         }
 
         /// <summary>
         /// How many slots are allocated. 
         /// </summary>
-        public virtual int Capacity()
+        public virtual int Capacity
         {
-            return this.capacity;
+            get
+            {
+                return this.capacity;
+            }
         }
 
         private void Grow()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0d070b5/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 94a9e38..2ba69a5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -69,7 +69,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// How many labels. </summary>
         public virtual int SizeOfMap()
         {
-            return this.collisionMap.Size();
+            return this.collisionMap.Count;
         }
 
         private CompactLabelToOrdinal()
@@ -117,7 +117,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         public override void AddLabel(FacetLabel label, int ordinal)
         {
-            if (collisionMap.Size() > threshold)
+            if (collisionMap.Count > threshold)
             {
                 Grow();
             }
@@ -205,7 +205,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
 
             CollisionMap oldCollisionMap = this.collisionMap;
-            this.collisionMap = new CollisionMap(oldCollisionMap.Capacity(), this.labelRepository);
+            this.collisionMap = new CollisionMap(oldCollisionMap.Capacity, this.labelRepository);
             this.threshold = (int)(this.capacity * this.loadFactor);
 
             var it = oldCollisionMap.GetEnumerator();
@@ -245,7 +245,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
             this.collisionMap.AddLabelOffset(hash, knownOffset, cid);
 
-            if (this.collisionMap.Size() > this.threshold)
+            if (this.collisionMap.Count > this.threshold)
             {
                 Grow();
             }
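
The Grow() trigger above compares CollisionMap.Count against a threshold derived from the load factor. A hedged worked example of the arithmetic; 16 and 0.75f are illustrative values, not necessarily CompactLabelToOrdinal's defaults:

    int capacity = 16;
    float loadFactor = 0.75f;
    int threshold = (int)(capacity * loadFactor);   // 12
    // With these numbers, AddLabel calls Grow() once collisionMap.Count exceeds 12,
    // i.e. when the 13th colliding entry arrives.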


[38/46] lucenenet git commit: Fixed all Facet compiler warnings

Posted by sy...@apache.org.
Fixed all Facet compiler warnings


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/8cbc4927
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/8cbc4927
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/8cbc4927

Branch: refs/heads/master
Commit: 8cbc4927cff3c3fba36b944b854597045091df15
Parents: 007b217
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 18:20:45 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:32:06 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/DrillSidewaysScorer.cs                        | 2 +-
 src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs | 2 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs       | 2 +-
 src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs           | 2 +-
 src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs                     | 2 ++
 src/Lucene.Net.Tests.Facet/TestDrillSideways.cs                    | 2 +-
 6 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8cbc4927/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
index 13d51e6..18a0e06 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
@@ -258,7 +258,7 @@ namespace Lucene.Net.Facet
                 docID = baseScorer.NextDoc();
                 nextDocContinue:;
             }
-            nextDocBreak:;
+            //nextDocBreak:; // Not referenced
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8cbc4927/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index 0a60bff..4552f08 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -425,7 +425,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                     }
                     sb.Append(i + ": " + category.ToString() + "\n");
                 }
-                catch (IOException e)
+                catch (IOException)
                 {
                     throw;
                 }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8cbc4927/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index 99dbed1..ffe18df 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -200,7 +200,7 @@ namespace Lucene.Net.Facet.Taxonomy
                     {
                         return scorer.Score();
                     }
-                    catch (Exception exception)
+                    catch (Exception /*exception*/)
                     {
                         throw;
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8cbc4927/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
index d39cefd..674e033 100644
--- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
@@ -1148,7 +1148,7 @@ namespace Lucene.Net.Facet.Range
             private Filter fastMatchFilter;
 
 
-            public DrillSidewaysAnonymousInnerClassHelper2(TestRangeFacetCounts testRangeFacetCounts, IndexSearcher indexSearcher, FacetsConfig facetsConfig, TaxonomyReader org, ValueSource valueSource, DoubleRange[] doubleRanges, Filter filter)
+            public DrillSidewaysAnonymousInnerClassHelper2(TestRangeFacetCounts outerInstance, IndexSearcher indexSearcher, FacetsConfig facetsConfig, TaxonomyReader org, ValueSource valueSource, DoubleRange[] doubleRanges, Filter filter)
                 : base(indexSearcher, facetsConfig, org)
             {
                 this.outerInstance = outerInstance;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8cbc4927/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs b/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
index c444992..5201f87 100644
--- a/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
+++ b/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
@@ -241,6 +241,8 @@ namespace Lucene.Net.Facet
                     return io.FilePointer;
                 }
             }
+
+            [Obsolete]
             public override void Seek(long pos)
             {
                 io.Seek(pos);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8cbc4927/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index e32693d..8060f49 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -1137,7 +1137,7 @@ namespace Lucene.Net.Facet
                 }
             nextDocContinue: ;
             }
-        nextDocBreak:
+            //nextDocBreak:// Not referenced
 
             IDictionary<string, int?> idToDocID = new Dictionary<string, int?>();
             for (int i = 0; i < s.IndexReader.MaxDoc; i++)
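
The fixes above follow two standard C# warning-removal patterns: a rethrowing catch drops the unused exception identifier, and a goto label that nothing references is commented out. A minimal sketch of the first pattern ('DoWork' is a placeholder; requires using System.IO):

    try
    {
        DoWork();            // placeholder for the guarded operation
    }
    catch (IOException)      // no variable, so no unused-variable warning
    {
        throw;               // a bare 'throw;' rethrows without resetting the stack trace
    }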


[37/46] lucenenet git commit: In Facet.Taxonomy.WriterCache.NameIntCacheLRU, renamed Size and MaxSize to Count and Capacity (.NETified)

Posted by sy...@apache.org.
In Facet.Taxonomy.WriterCache.NameIntCacheLRU, renamed Size and MaxSize to Count and Capacity (.NETified)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/007b217c
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/007b217c
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/007b217c

Branch: refs/heads/master
Commit: 007b217c72ae728b8a55ee76e3d747cf9dfc88ce
Parents: 6475006
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 17:35:07 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:32:04 2016 +0700

----------------------------------------------------------------------
 .../WriterCache/LruTaxonomyWriterCache.cs       |  2 +-
 .../Taxonomy/WriterCache/NameIntCacheLRU.cs     | 20 +++++++++++---------
 2 files changed, 12 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/007b217c/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index ad76ccd..7152094 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -85,7 +85,7 @@
             {
                 lock (this)
                 {
-                    return cache.Size == cache.MaxSize;
+                    return cache.Count == cache.Capacity;
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/007b217c/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
index e5c81e5..ca0011a 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
@@ -33,27 +33,27 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         private Dictionary<object, int?> cache;
         internal long nMisses = 0; // for debug
         internal long nHits = 0; // for debug
-        private int maxCacheSize;
+        private int capacity;
 
-        internal NameIntCacheLRU(int maxCacheSize)
+        internal NameIntCacheLRU(int capacity)
         {
-            this.maxCacheSize = maxCacheSize;
-            CreateCache(maxCacheSize);
+            this.capacity = capacity;
+            CreateCache(capacity);
         }
 
         /// <summary>
         /// Maximum number of cache entries before eviction. </summary>
-        public virtual int MaxSize
+        public virtual int Capacity
         {
             get
             {
-                return maxCacheSize;
+                return capacity;
             }
         }
 
         /// <summary>
         /// Number of entries currently in the cache. </summary>
-        public virtual int Size
+        public virtual int Count
         {
             get
             {
@@ -63,6 +63,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         private void CreateCache(int maxSize)
         {
+            // LUCENENET TODO: Create an adapter so we can plug in either a generic
+            // dictionary or LRUHashMap or alternatively make LRUHashMap implement IDictionary<TKey, TValue>
             //if (maxSize < int.MaxValue)
             //{
             //    cache = new LRUHashMap<object,int?>(1000,true); //for LRU
@@ -120,7 +122,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         {
             get
             {
-                return cache.Count > maxCacheSize;
+                return cache.Count > capacity;
             }
         }
 
@@ -147,7 +149,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             {
                 return false;
             }
-            int n = cache.Count - (2 * maxCacheSize) / 3;
+            int n = cache.Count - (2 * capacity) / 3;
             if (n <= 0)
             {
                 return false;
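
The eviction size computed above trims an overflowing cache back to roughly two-thirds of its Capacity. A hedged worked example of the numbers, using Capacity = 1000 purely for illustration:

    int capacity = 1000;
    int count = 1001;                       // the cache has overflowed by one entry
    int n = count - (2 * capacity) / 3;     // 1001 - 666 = 335 entries to evict
    // Removing n entries leaves 666, about 2/3 of Capacity, so there is headroom
    // before the next round of evictions.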


[16/46] lucenenet git commit: .NETify Facet: Method names should be PascalCase

Posted by sy...@apache.org.
.NETify Facet: Method names should be PascalCase


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/9604c0f7
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/9604c0f7
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/9604c0f7

Branch: refs/heads/master
Commit: 9604c0f723c4574cfe48f491a7ff6de06613c094
Parents: 2e5bae0
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 14:50:19 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:09 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/FacetsConfig.cs            | 10 ++++----
 src/Lucene.Net.Facet/Range/DoubleRange.cs       |  4 ++--
 .../Range/DoubleRangeFacetCounts.cs             |  4 ++--
 src/Lucene.Net.Facet/Range/LongRange.cs         |  4 ++--
 src/Lucene.Net.Facet/Range/LongRangeCounter.cs  | 24 ++++++++++----------
 .../Range/LongRangeFacetCounts.cs               |  4 ++--
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs   | 12 +++++-----
 .../Taxonomy/FloatAssociationFacetField.cs      | 12 +++++-----
 .../Taxonomy/IntAssociationFacetField.cs        |  8 +++----
 9 files changed, 41 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 99aaf6c..16a0ea3 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -378,7 +378,7 @@ namespace Lucene.Net.Facet
             Document result = new Document();
 
             ProcessFacetFields(taxoWriter, byField, result);
-            processSSDVFacetFields(dvByField, result);
+            ProcessSSDVFacetFields(dvByField, result);
             ProcessAssocFacetFields(taxoWriter, assocByField, result);
 
             //System.out.println("add stored: " + addedStoredFields);
@@ -416,7 +416,7 @@ namespace Lucene.Net.Facet
 
                     FacetLabel cp = new FacetLabel(facetField.dim, facetField.path);
 
-                    checkTaxoWriter(taxoWriter);
+                    CheckTaxoWriter(taxoWriter);
                     int ordinal = taxoWriter.AddCategory(cp);
                     if (ordinals.Length == ordinals.Ints.Length)
                     {
@@ -461,7 +461,7 @@ namespace Lucene.Net.Facet
             }
         }
 
-        public void processSSDVFacetFields(IDictionary<string, IList<SortedSetDocValuesFacetField>> byField, Document doc)
+        public void ProcessSSDVFacetFields(IDictionary<string, IList<SortedSetDocValuesFacetField>> byField, Document doc)
         {
             //System.out.println("process SSDV: " + byField);
             foreach (KeyValuePair<string, IList<SortedSetDocValuesFacetField>> ent in byField)
@@ -496,7 +496,7 @@ namespace Lucene.Net.Facet
                 foreach (AssociationFacetField field in ent.Value)
                 {
                     // NOTE: we don't add parents for associations
-                    checkTaxoWriter(taxoWriter);
+                    CheckTaxoWriter(taxoWriter);
                     FacetLabel label = new FacetLabel(field.dim, field.path);
                     int ordinal = taxoWriter.AddCategory(label);
                     if (upto + 4 > bytes.Length)
@@ -592,7 +592,7 @@ namespace Lucene.Net.Facet
             return new BytesRef(bytes, 0, upto);
         }
 
-        private void checkTaxoWriter(ITaxonomyWriter taxoWriter)
+        private void CheckTaxoWriter(ITaxonomyWriter taxoWriter)
         {
             if (taxoWriter == null)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Range/DoubleRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRange.cs b/src/Lucene.Net.Facet/Range/DoubleRange.cs
index 28685d4..93f85cb 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRange.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRange.cs
@@ -100,7 +100,7 @@ namespace Lucene.Net.Facet.Range
 
         /// <summary>
         /// True if this range accepts the provided value. </summary>
-        public bool accept(double value)
+        public bool Accept(double value)
         {
             return value >= minIncl && value <= maxIncl;
         }
@@ -216,7 +216,7 @@ namespace Lucene.Net.Facet.Range
                         {
                             return false;
                         }
-                        return outerInstance.outerInstance.outerInstance.accept(outerInstance.values.DoubleVal(docID));
+                        return outerInstance.outerInstance.outerInstance.Accept(outerInstance.values.DoubleVal(docID));
                     }
 
                     public virtual int Length()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index 1e3a996..1033a42 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -135,7 +135,7 @@ namespace Lucene.Net.Facet.Range
                     // Skip missing docs:
                     if (fv.Exists(doc))
                     {
-                        counter.add(NumericUtils.DoubleToSortableLong(fv.DoubleVal(doc)));
+                        counter.Add(NumericUtils.DoubleToSortableLong(fv.DoubleVal(doc)));
                     }
                     else
                     {
@@ -144,7 +144,7 @@ namespace Lucene.Net.Facet.Range
                 }
             }
 
-            missingCount += counter.fillCounts(Counts);
+            missingCount += counter.FillCounts(Counts);
             TotCount -= missingCount;
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Range/LongRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRange.cs b/src/Lucene.Net.Facet/Range/LongRange.cs
index 7a4f0c1..c54456d 100644
--- a/src/Lucene.Net.Facet/Range/LongRange.cs
+++ b/src/Lucene.Net.Facet/Range/LongRange.cs
@@ -101,7 +101,7 @@ namespace Lucene.Net.Facet.Range
 
         /// <summary>
         /// True if this range accepts the provided value. </summary>
-        public bool accept(long value)
+        public bool Accept(long value)
         {
             return value >= minIncl && value <= maxIncl;
         }
@@ -214,7 +214,7 @@ namespace Lucene.Net.Facet.Range
                         {
                             return false;
                         }
-                        return outerInstance.outerInstance.outerInstance.accept(outerInstance.values.LongVal(docID));
+                        return outerInstance.outerInstance.outerInstance.Accept(outerInstance.values.LongVal(docID));
                     }
 
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index 43ef3b4..f5753fa 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -135,7 +135,7 @@ namespace Lucene.Net.Facet.Range
             // each node in the tree:
             for (int i = 0; i < ranges.Length; i++)
             {
-                root.addOutputs(i, ranges[i]);
+                root.AddOutputs(i, ranges[i]);
             }
 
             // Set boundaries (ends of each elementary interval):
@@ -153,7 +153,7 @@ namespace Lucene.Net.Facet.Range
             //System.out.println("root:\n" + root);
         }
 
-        public void add(long v)
+        public void Add(long v)
         {
             //System.out.println("add v=" + v);
 
@@ -208,23 +208,23 @@ namespace Lucene.Net.Facet.Range
         ///  ranges, returning the missing count (how many hits
         ///  didn't match any ranges). 
         /// </summary>
-        public int fillCounts(int[] counts)
+        public int FillCounts(int[] counts)
         {
             //System.out.println("  rollup");
             missingCount = 0;
             leafUpto = 0;
-            rollup(root, counts, false);
+            Rollup(root, counts, false);
             return missingCount;
         }
 
-        private int rollup(LongRangeNode node, int[] counts, bool sawOutputs)
+        private int Rollup(LongRangeNode node, int[] counts, bool sawOutputs)
         {
             int count;
             sawOutputs |= node.outputs != null;
             if (node.left != null)
             {
-                count = rollup(node.left, counts, sawOutputs);
-                count += rollup(node.right, counts, sawOutputs);
+                count = Rollup(node.left, counts, sawOutputs);
+                count += Rollup(node.right, counts, sawOutputs);
             }
             else
             {
@@ -319,7 +319,7 @@ namespace Lucene.Net.Facet.Range
                 return sb.ToString();
             }
 
-            internal static void indent(StringBuilder sb, int depth)
+            internal static void Indent(StringBuilder sb, int depth)
             {
                 for (int i = 0; i < depth; i++)
                 {
@@ -329,7 +329,7 @@ namespace Lucene.Net.Facet.Range
 
             /// <summary>
             /// Recursively assigns range outputs to each node. </summary>
-            internal void addOutputs(int index, LongRange range)
+            internal void AddOutputs(int index, LongRange range)
             {
                 if (start >= range.minIncl && end <= range.maxIncl)
                 {
@@ -345,14 +345,14 @@ namespace Lucene.Net.Facet.Range
                 {
                     Debug.Assert(right != null);
                     // Recurse:
-                    left.addOutputs(index, range);
-                    right.addOutputs(index, range);
+                    left.AddOutputs(index, range);
+                    right.AddOutputs(index, range);
                 }
             }
 
             internal void ToString(StringBuilder sb, int depth)
             {
-                indent(sb, depth);
+                Indent(sb, depth);
                 if (left == null)
                 {
                     Debug.Assert(right == null);
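
The renamed Add and FillCounts pair carries the counting contract: each hit value is Add-ed once, and FillCounts then distributes the accumulated counts across the requested ranges and returns how many hits matched no range at all (the missing count, as the summary above the method says). Below is a rough, self-contained sketch of that contract; the SimpleRange and RangeCountSketch names are invented here, and a linear scan stands in for the elementary-interval tree the real LongRangeCounter builds, so this only shows what is computed, not how.

    // Count each value into every inclusive [Min, Max] range it falls in
    // (ranges may overlap), and report how many values matched no range.
    internal struct SimpleRange
    {
        public long Min;
        public long Max;
    }

    internal static class RangeCountSketch
    {
        public static int FillCounts(long[] values, SimpleRange[] ranges, int[] counts)
        {
            int missingCount = 0;
            foreach (long v in values)
            {
                bool matched = false;
                for (int i = 0; i < ranges.Length; i++)
                {
                    if (v >= ranges[i].Min && v <= ranges[i].Max)
                    {
                        counts[i]++;
                        matched = true;
                    }
                }
                if (!matched)
                {
                    missingCount++;
                }
            }
            return missingCount;
        }
    }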

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 813757d..167eb85 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -120,7 +120,7 @@ namespace Lucene.Net.Facet.Range
                     // Skip missing docs:
                     if (fv.Exists(doc))
                     {
-                        counter.add(fv.LongVal(doc));
+                        counter.Add(fv.LongVal(doc));
                     }
                     else
                     {
@@ -129,7 +129,7 @@ namespace Lucene.Net.Facet.Range
                 }
             }
 
-            int x = counter.fillCounts(Counts);
+            int x = counter.FillCounts(Counts);
 
             missingCount += x;
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 3709b58..d00c50a 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -149,19 +149,19 @@ namespace Lucene.Net.Facet.Taxonomy
             return length - other.length;
         }
 
-        private void hasDelimiter(string offender, char delimiter)
+        private void HasDelimiter(string offender, char delimiter)
         {
             throw new System.ArgumentException("delimiter character '" + delimiter + 
                 "' (U+" + delimiter.ToString() + ") appears in path component \"" + offender + "\"");
         }
 
-        private void noDelimiter(char[] buf, int offset, int len, char delimiter)
+        private void NoDelimiter(char[] buf, int offset, int len, char delimiter)
         {
             for (int idx = 0; idx < len; idx++)
             {
                 if (buf[offset + idx] == delimiter)
                 {
-                    hasDelimiter(new string(buf, offset, len), delimiter);
+                    HasDelimiter(new string(buf, offset, len), delimiter);
                 }
             }
         }
@@ -190,12 +190,12 @@ namespace Lucene.Net.Facet.Taxonomy
             {
                 int len = components[i].Length;
                 components[i].CopyTo(0, buf, idx, len - 0);
-                noDelimiter(buf, idx, len, delimiter);
+                NoDelimiter(buf, idx, len, delimiter);
                 idx += len;
                 buf[idx++] = delimiter;
             }
             components[upto].CopyTo(0, buf, idx, components[upto].Length - 0);
-            noDelimiter(buf, idx, components[upto].Length, delimiter);
+            NoDelimiter(buf, idx, components[upto].Length, delimiter);
 
             return idx + components[upto].Length - start;
         }
@@ -301,7 +301,7 @@ namespace Lucene.Net.Facet.Taxonomy
             {
                 if (components[i].IndexOf(delimiter) != -1)
                 {
-                    hasDelimiter(components[i], delimiter);
+                    HasDelimiter(components[i], delimiter);
                 }
                 sb.Append(components[i]).Append(delimiter);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index a1e6c88..8b47430 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -38,28 +38,28 @@ namespace Lucene.Net.Facet.Taxonomy
         ///  float association 
         /// </summary>
         public FloatAssociationFacetField(float assoc, string dim, params string[] path) 
-            : base(floatToBytesRef(assoc), dim, path)
+            : base(FloatToBytesRef(assoc), dim, path)
         {
         }
 
         /// <summary>
         /// Encodes a {@code float} as a 4-byte <seealso cref="BytesRef"/>. </summary>
-        public static BytesRef floatToBytesRef(float v)
+        public static BytesRef FloatToBytesRef(float v)
         {
-            return IntAssociationFacetField.intToBytesRef(Number.FloatToIntBits(v));
+            return IntAssociationFacetField.IntToBytesRef(Number.FloatToIntBits(v));
         }
 
         /// <summary>
         /// Decodes a previously encoded {@code float}. </summary>
-        public static float bytesRefToFloat(BytesRef b)
+        public static float BytesRefToFloat(BytesRef b)
         {
-            return Number.IntBitsToFloat(IntAssociationFacetField.bytesRefToInt(b));
+            return Number.IntBitsToFloat(IntAssociationFacetField.BytesRefToInt(b));
         }
 
         public override string ToString()
         {
             return "FloatAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + 
-                " value=" + bytesRefToFloat(assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
+                " value=" + BytesRefToFloat(assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9604c0f7/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
index 417fbd6..1c854fd 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
@@ -37,7 +37,7 @@ namespace Lucene.Net.Facet.Taxonomy
         ///  int association 
         /// </summary>
         public IntAssociationFacetField(int assoc, string dim, params string[] path)
-            : base(intToBytesRef(assoc), dim, path)
+            : base(IntToBytesRef(assoc), dim, path)
         {
         }
 
@@ -45,7 +45,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Encodes an {@code int} as a 4-byte <seealso cref="BytesRef"/>,
         ///  big-endian. 
         /// </summary>
-        public static BytesRef intToBytesRef(int v)
+        public static BytesRef IntToBytesRef(int v)
         {
 
             byte[] bytes = new byte[4];
@@ -59,7 +59,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Decodes a previously encoded {@code int}. </summary>
-        public static int bytesRefToInt(BytesRef b)
+        public static int BytesRefToInt(BytesRef b)
         {
             return ((b.Bytes[b.Offset] & 0xFF) << 24) | ((b.Bytes[b.Offset + 1] & 0xFF) << 16) | 
                 ((b.Bytes[b.Offset + 2] & 0xFF) << 8) | (b.Bytes[b.Offset + 3] & 0xFF);
@@ -67,7 +67,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         public override string ToString()
         {
-            return "IntAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + bytesRefToInt(assoc) + ")";
+            return "IntAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + BytesRefToInt(assoc) + ")";
         }
     }
 }
\ No newline at end of file
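
The association encoding itself is small enough to show end to end. Here is a self-contained round trip of the same 4-byte big-endian layout, using a plain byte array in place of BytesRef and BitConverter in place of the support-library Number.FloatToIntBits/IntBitsToFloat helpers referenced in the diff; the sketch class and method names are invented for illustration.

    using System;

    // Big-endian 4-byte encoding as used for int associations, and the
    // float-bits trick layered on top of it for float associations.
    internal static class AssociationEncodingSketch
    {
        public static byte[] IntToBytes(int v)
        {
            return new byte[]
            {
                (byte)(v >> 24),
                (byte)(v >> 16),
                (byte)(v >> 8),
                (byte)v
            };
        }

        public static int BytesToInt(byte[] b)
        {
            return ((b[0] & 0xFF) << 24) | ((b[1] & 0xFF) << 16) |
                   ((b[2] & 0xFF) << 8) | (b[3] & 0xFF);
        }

        public static byte[] FloatToBytes(float v)
        {
            // Reinterpret the float's raw IEEE 754 bits as an int, then encode as above.
            return IntToBytes(BitConverter.ToInt32(BitConverter.GetBytes(v), 0));
        }

        public static float BytesToFloat(byte[] b)
        {
            return BitConverter.ToSingle(BitConverter.GetBytes(BytesToInt(b)), 0);
        }

        public static void Main()
        {
            Console.WriteLine(BytesToInt(IntToBytes(42)));        // expected: 42
            Console.WriteLine(BytesToFloat(FloatToBytes(3.14f))); // expected: 3.14
        }
    }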


[34/46] lucenenet git commit: Changed Facet.Taxonomy.LRUHashMap Size and MaxSize methods to Count and Capacity properties (.NETified)

Posted by sy...@apache.org.
Changed Facet.Taxonomy.LRUHashMap Size and MaxSize methods to Count and Capacity properties (.NETified)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/4319c5d1
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/4319c5d1
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/4319c5d1

Branch: refs/heads/master
Commit: 4319c5d1281469a0f7f34cc3bfa91f3fdbc21197
Parents: 66e3932
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 16:18:05 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:56 2016 +0700

----------------------------------------------------------------------
 .../Directory/DirectoryTaxonomyReader.cs        |  4 +--
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs     | 30 +++++++++++---------
 .../Taxonomy/TestLRUHashMap.cs                  | 14 ++++-----
 3 files changed, 25 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4319c5d1/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index 98bd09e..0a60bff 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -393,8 +393,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 EnsureOpen();
                 // LUCENENET NOTE: No locking required here,
                 // since our LRU implementation is thread-safe
-                categoryCache.MaxSize = value;
-                ordinalCache.MaxSize = value;
+                categoryCache.Capacity = value;
+                ordinalCache.Capacity = value;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4319c5d1/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index 1f7883f..059f834 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -62,28 +62,28 @@ namespace Lucene.Net.Facet.Taxonomy
         // Record last access so we can tie break if 2 calls make it in within
         // the same millisecond.
         private long lastAccess;
-        private int maxSize;
+        private int capacity;
 
-        public LRUHashMap(int maxSize)
+        public LRUHashMap(int capacity)
         {
-            if (maxSize < 1)
+            if (capacity < 1)
             {
-                throw new ArgumentOutOfRangeException("maxSize must be at least 1");
+                throw new ArgumentOutOfRangeException("capacity must be at least 1");
             }
-            this.maxSize = maxSize;
-            this.cache = new Dictionary<TKey, CacheDataObject>(maxSize);
+            this.capacity = capacity;
+            this.cache = new Dictionary<TKey, CacheDataObject>(capacity);
         }
 
-        public virtual int MaxSize
+        public virtual int Capacity
         {
-            get { return maxSize; }
+            get { return capacity; }
             set
             {
                 if (value < 1)
                 {
-                    throw new ArgumentOutOfRangeException("MaxSize must be at least 1");
+                    throw new ArgumentOutOfRangeException("Capacity must be at least 1");
                 }
-                maxSize = value;
+                capacity = value;
             }
         }
 
@@ -105,7 +105,7 @@ namespace Lucene.Net.Facet.Taxonomy
                         timestamp = GetTimestamp()
                     };
                     // We have added a new item, so we may need to remove the eldest
-                    if (cache.Count > MaxSize)
+                    if (cache.Count > Capacity)
                     {
                         // Remove the eldest item (lowest timestamp) from the cache
                         cache.Remove(cache.OrderBy(x => x.Value.timestamp).First().Key);
@@ -155,10 +155,12 @@ namespace Lucene.Net.Facet.Taxonomy
             return cache.ContainsKey(key);
         }
 
-        // LUCENENET TODO: Rename to Count (.NETify)
-        public int Size()
+        public int Count
         {
-            return cache.Count;
+            get
+            {
+                return cache.Count;
+            }
         }
 
         private long GetTimestamp()
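
For the overall shape in one place, here is a single-threaded sketch of the timestamp-based eviction visible in the Put hunk above. The generic LruSketch name and the monotonic counter are stand-ins; the committed LRUHashMap keys its CacheDataObject entries by a millisecond timestamp with a last-access tie-break, and is documented as thread-safe, both of which are omitted here to keep the pattern visible.

    using System.Collections.Generic;
    using System.Linq;

    internal class LruSketch<TKey, TValue>
    {
        private class Entry
        {
            internal TValue value;
            internal long timestamp;
        }

        private readonly Dictionary<TKey, Entry> cache = new Dictionary<TKey, Entry>();
        private long clock;

        public LruSketch(int capacity) { Capacity = capacity; }

        public int Capacity { get; private set; }
        public int Count { get { return cache.Count; } }

        public void Put(TKey key, TValue value)
        {
            cache[key] = new Entry { value = value, timestamp = ++clock };
            if (cache.Count > Capacity)
            {
                // Evict the entry with the oldest timestamp, i.e. the least recently touched key.
                cache.Remove(cache.OrderBy(x => x.Value.timestamp).First().Key);
            }
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            Entry e;
            if (cache.TryGetValue(key, out e))
            {
                e.timestamp = ++clock;   // touching an entry makes it "recent" again
                value = e.value;
                return true;
            }
            value = default(TValue);
            return false;
        }
    }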

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4319c5d1/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
index c08bca8..1f7afb1 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
@@ -31,27 +31,27 @@ namespace Lucene.Net.Facet.Taxonomy
         public virtual void TestLru()
         {
             LRUHashMap<string, string> lru = new LRUHashMap<string, string>(3);
-            Assert.AreEqual(0, lru.Size());
+            Assert.AreEqual(0, lru.Count);
             lru.Put("one", "Hello world");
-            Assert.AreEqual(1, lru.Size());
+            Assert.AreEqual(1, lru.Count);
             lru.Put("two", "Hi man");
-            Assert.AreEqual(2, lru.Size());
+            Assert.AreEqual(2, lru.Count);
             lru.Put("three", "Bonjour");
-            Assert.AreEqual(3, lru.Size());
+            Assert.AreEqual(3, lru.Count);
             lru.Put("four", "Shalom");
-            Assert.AreEqual(3, lru.Size());
+            Assert.AreEqual(3, lru.Count);
             Assert.NotNull(lru.Get("three"));
             Assert.NotNull(lru.Get("two"));
             Assert.NotNull(lru.Get("four"));
             Assert.Null(lru.Get("one"));
             lru.Put("five", "Yo!");
-            Assert.AreEqual(3, lru.Size());
+            Assert.AreEqual(3, lru.Count);
             Assert.Null(lru.Get("three")); // three was last used, so it got removed
             Assert.NotNull(lru.Get("five"));
             lru.Get("four");
             lru.Put("six", "hi");
             lru.Put("seven", "hey dude");
-            Assert.AreEqual(3, lru.Size());
+            Assert.AreEqual(3, lru.Count);
             Assert.Null(lru.Get("one"));
             Assert.Null(lru.Get("two"));
             Assert.Null(lru.Get("three"));


[40/46] lucenenet git commit: Facet: Updated documentation

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index ae177b6..9b52503 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -31,19 +31,20 @@ namespace Lucene.Net.Facet.Taxonomy
     public class CategoryPath : IComparable<CategoryPath>
     {
         /// <summary>
-        /// An empty <seealso cref="CategoryPath"/> </summary>
+        /// An empty <see cref="CategoryPath"/>
+        /// </summary>
         public static readonly CategoryPath EMPTY = new CategoryPath();
 
         /// <summary>
-        /// The components of this <seealso cref="CategoryPath"/>. Note that this array may be
-        /// shared with other <seealso cref="CategoryPath"/> instances, e.g. as a result of
-        /// <seealso cref="#subpath(int)"/>, therefore you should traverse the array up to
-        /// <seealso cref="#length"/> for this path's components.
+        /// The components of this <see cref="CategoryPath"/>. Note that this array may be
+        /// shared with other <see cref="CategoryPath"/> instances, e.g. as a result of
+        /// <see cref="Subpath(int)"/>, therefore you should traverse the array up to
+        /// <see cref="Length"/> for this path's components.
         /// </summary>
         public string[] Components { get; private set; }
 
         /// <summary>
-        /// The number of components of this <seealso cref="CategoryPath"/>. </summary>
+        /// The number of components of this <see cref="CategoryPath"/>. </summary>
         public int Length { get; private set; }
 
         // Used by singleton EMPTY
@@ -65,7 +66,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Construct from the given path components. </summary>
+        /// Construct from the given path <paramref name="components"/>.
+        /// </summary>
         public CategoryPath(params string[] components)
         {
             Debug.Assert(components.Length > 0, "use CategoryPath.EMPTY to create an empty path");
@@ -81,7 +83,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Construct from a given path, separating path components with {@code delimiter}. </summary>
+        /// Construct from a given path, separating path components with <paramref name="delimiter"/>.
+        /// </summary>
         public CategoryPath(string pathString, char delimiter)
         {
             string[] comps = pathString.Split(new[] { delimiter }, StringSplitOptions.RemoveEmptyEntries);
@@ -107,7 +110,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// <summary>
         /// Returns the number of characters needed to represent the path, including
         /// delimiter characters, for using with
-        /// <seealso cref="#copyFullPath(char[], int, char)"/>.
+        /// <see cref="CopyFullPath(char[], int, char)"/>.
         /// </summary>
         public virtual int FullPathLength()
         {
@@ -126,7 +129,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Compares this path with another <seealso cref="CategoryPath"/> for lexicographic
+        /// Compares this path with another <see cref="CategoryPath"/> for lexicographic
         /// order.
         /// </summary>
         public virtual int CompareTo(CategoryPath other)
@@ -167,14 +170,14 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Copies the path components to the given {@code char[]}, starting at index
-        /// {@code start}. {@code delimiter} is copied between the path components.
+        /// Copies the path components to the given <see cref="char[]"/>, starting at index
+        /// <paramref name="start"/>. <paramref name="delimiter"/> is copied between the path components.
         /// Returns the number of chars copied.
         /// 
         /// <para>
         /// <b>NOTE:</b> this method relies on the array being large enough to hold the
         /// components and separators - the amount of needed space can be calculated
-        /// with <seealso cref="#fullPathLength()"/>.
+        /// with <see cref="FullPathLength()"/>.
         /// </para>
         /// </summary>
         public virtual int CopyFullPath(char[] buf, int start, char delimiter)
@@ -241,7 +244,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Calculate a 64-bit hash function for this path. </summary>
+        /// Calculate a 64-bit hash function for this path.
+        /// </summary>
         public virtual long LongHashCode()
         {
             if (Length == 0)
@@ -258,7 +262,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Returns a sub-path of this path up to {@code length} components. </summary>
+        /// Returns a sub-path of this path up to <paramref name="length"/> components.
+        /// </summary>
         public virtual CategoryPath Subpath(int length)
         {
             if (length >= this.Length || length < 0)
@@ -279,7 +284,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Returns a string representation of the path, separating components with
         /// '/'.
         /// </summary>
-        /// <seealso cref= #toString(char) </seealso>
+        /// <see cref= #toString(char) </seealso>
         public override string ToString()
         {
             return ToString('/');
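
Since the cleaned-up docs now point at the PascalCase members directly, a short usage sketch of the ones mentioned (FullPathLength, CopyFullPath, Subpath, ToString(char)) may help. The component strings and the Main wrapper are invented, ToString(char) is assumed public because the parameterless override above delegates to it, and the expected-output comments follow the '/' delimiter behaviour the XML docs describe.

    using System;
    using Lucene.Net.Facet.Taxonomy;

    internal static class CategoryPathDemo
    {
        public static void Main()
        {
            // Three path components; CategoryPath keeps them as an array plus a Length.
            var path = new CategoryPath("Author", "Mark Twain", "1886");

            // FullPathLength() sizes a buffer for CopyFullPath, delimiters included.
            char[] buf = new char[path.FullPathLength()];
            int written = path.CopyFullPath(buf, 0, '/');
            Console.WriteLine(new string(buf, 0, written));   // expected: Author/Mark Twain/1886

            // Subpath(2) shares the components array but only exposes the first two.
            CategoryPath sub = path.Subpath(2);
            Console.WriteLine(sub.ToString('/'));             // expected: Author/Mark Twain
        }
    }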

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index 4552f08..0949614 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -34,8 +34,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     using MultiFields = Lucene.Net.Index.MultiFields;
 
     /// <summary>
-    /// A <seealso cref="TaxonomyReader"/> which retrieves stored taxonomy information from a
-    /// <seealso cref="Directory"/>.
+    /// A <see cref="TaxonomyReader"/> which retrieves stored taxonomy information from a
+    /// <see cref="Directory"/>.
     /// <P>
     /// Reading from the on-disk index on every method call is too slow, so this
     /// implementation employs caching: Some methods cache recent requests and their
@@ -64,8 +64,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         private volatile TaxonomyIndexArrays taxoArrays;
 
         /// <summary>
-        /// Called only from <seealso cref="#doOpenIfChanged()"/>. If the taxonomy has been
-        /// recreated, you should pass {@code null} as the caches and parent/children
+        /// Called only from <see cref="DoOpenIfChanged()"/>. If the taxonomy has been
+        /// recreated, you should pass <c>null</c> as the caches and parent/children
         /// arrays.
         /// </summary>
         internal DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter, 
@@ -84,14 +84,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Open for reading a taxonomy stored in a given <seealso cref="Directory"/>.
+        /// Open for reading a taxonomy stored in a given <see cref="Directory"/>.
         /// </summary>
-        /// <param name="directory">
-        ///          The <seealso cref="Directory"/> in which the taxonomy resides. </param>
-        /// <exception cref="CorruptIndexException">
-        ///           if the Taxonomy is corrupt. </exception>
-        /// <exception cref="IOException">
-        ///           if another error occurred. </exception>
+        /// <param name="directory"> The <see cref="Directory"/> in which the taxonomy resides. </param>
+        /// <exception cref="Index.CorruptIndexException"> if the Taxonomy is corrupt. </exception>
+        /// <exception cref="IOException"> if another error occurred. </exception>
         public DirectoryTaxonomyReader(Directory directory)
         {
             indexReader = OpenIndexReader(directory);
@@ -106,11 +103,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Opens a <seealso cref="DirectoryTaxonomyReader"/> over the given
-        /// <seealso cref="DirectoryTaxonomyWriter"/> (for NRT).
+        /// Opens a <see cref="DirectoryTaxonomyReader"/> over the given
+        /// <see cref="DirectoryTaxonomyWriter"/> (for NRT).
         /// </summary>
         /// <param name="taxoWriter">
-        ///          The <seealso cref="DirectoryTaxonomyWriter"/> from which to obtain newly
+        ///          The <see cref="DirectoryTaxonomyWriter"/> from which to obtain newly
         ///          added categories, in real-time. </param>
         public DirectoryTaxonomyReader(DirectoryTaxonomyWriter taxoWriter)
         {
@@ -150,16 +147,16 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Implements the opening of a new <seealso cref="DirectoryTaxonomyReader"/> instance if
+        /// Implements the opening of a new <see cref="DirectoryTaxonomyReader"/> instance if
         /// the taxonomy has changed.
         /// 
         /// <para>
-        /// <b>NOTE:</b> the returned <seealso cref="DirectoryTaxonomyReader"/> shares the
+        /// <b>NOTE:</b> the returned <see cref="DirectoryTaxonomyReader"/> shares the
         /// ordinal and category caches with this reader. This is not expected to cause
         /// any issues, unless the two instances continue to live. The reader
         /// guarantees that the two instances cannot affect each other in terms of
         /// correctness of the caches, however if the size of the cache is changed
-        /// through <seealso cref="#setCacheSize(int)"/>, it will affect both reader instances.
+        /// through <see cref="CacheSize"/>, it will affect both reader instances.
         /// </para>
         /// </summary>
         protected override TaxonomyReader DoOpenIfChanged()
@@ -231,8 +228,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Open the <seealso cref="DirectoryReader"/> from this {@link
-        ///  Directory}. 
+        /// Open the <see cref="DirectoryReader"/> from this <see cref="Directory"/>. 
         /// </summary>
         protected virtual DirectoryReader OpenIndexReader(Directory directory)
         {
@@ -240,8 +236,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Open the <seealso cref="DirectoryReader"/> from this {@link
-        ///  IndexWriter}. 
+        /// Open the <see cref="DirectoryReader"/> from this <see cref="IndexWriter"/>. 
         /// </summary>
         protected virtual DirectoryReader OpenIndexReader(IndexWriter writer)
         {
@@ -249,8 +244,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Expert: returns the underlying <seealso cref="DirectoryReader"/> instance that is
-        /// used by this <seealso cref="TaxonomyReader"/>.
+        /// Expert: returns the underlying <see cref="DirectoryReader"/> instance that is
+        /// used by this <see cref="TaxonomyReader"/>.
         /// </summary>
         internal virtual DirectoryReader InternalIndexReader
         {
@@ -379,13 +374,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// setCacheSize controls the maximum allowed size of each of the caches
-        /// used by <seealso cref="#getPath(int)"/> and <seealso cref="#getOrdinal(FacetLabel)"/>.
+        /// Setting <see cref="CacheSize"/> controls the maximum allowed size of each of the caches
+        /// used by <see cref="GetPath(int)"/> and <see cref="GetOrdinal(FacetLabel)"/>.
         /// <P>
         /// Currently, if the given size is smaller than the current size of
         /// a cache, it will not shrink, and rather we be limited to its current
         /// size. </summary>
-        /// <param name="size"> the new maximum cache size, in number of entries. </param>
+        /// <param name="value"> the new maximum cache size, in number of entries. </param>
         public virtual int CacheSize
         {
             set
@@ -400,8 +395,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Returns ordinal -> label mapping, up to the provided
-        ///  max ordinal or number of ordinals, whichever is
-        ///  smaller. 
+        /// max ordinal or number of ordinals, whichever is
+        /// smaller. 
         /// </summary>
         public virtual string ToString(int max)
         {
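
A hedged usage sketch of the CacheSize setter documented above: it assumes taxoDir already holds a taxonomy written by a DirectoryTaxonomyWriter, that GetPath(int) returns the FacetLabel for a valid ordinal, and it leaves disposal of the reader to the caller; only the member names spelled out in the updated docs are relied on.

    using Lucene.Net.Facet.Taxonomy;
    using Lucene.Net.Facet.Taxonomy.Directory;

    internal static class CacheSizeDemo
    {
        // Sketch only: taxoDir must already contain a taxonomy index.
        public static FacetLabel ReadWithLargerCache(Lucene.Net.Store.Directory taxoDir, int ordinal)
        {
            var reader = new DirectoryTaxonomyReader(taxoDir);
            // Raise the per-cache entry limit used by GetPath(int) and GetOrdinal(FacetLabel);
            // per the docs above, a cache that is already larger will not shrink.
            reader.CacheSize = 4096;
            // (Disposal of the reader is omitted in this sketch.)
            return reader.GetPath(ordinal);
        }
    }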

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index dd497fd..f59337e 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -54,20 +54,20 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     using TokenStream = Lucene.Net.Analysis.TokenStream;
 
     /// <summary>
-    /// <seealso cref="TaxonomyWriter"/> which uses a <seealso cref="Directory"/> to store the taxonomy
+    /// <see cref="ITaxonomyWriter"/> which uses a <see cref="Store.Directory"/> to store the taxonomy
     /// information on disk, and keeps an additional in-memory cache of some or all
     /// categories.
     /// <para>
-    /// In addition to the permanently-stored information in the <seealso cref="Directory"/>,
-    /// efficiency dictates that we also keep an in-memory cache of <B>recently
-    /// seen</B> or <B>all</B> categories, so that we do not need to go back to disk
+    /// In addition to the permanently-stored information in the <see cref="Store.Directory"/>,
+    /// efficiency dictates that we also keep an in-memory cache of <b>recently
+    /// seen</b> or <b>all</b> categories, so that we do not need to go back to disk
     /// for every category addition to see which ordinal this category already has,
-    /// if any. A <seealso cref="TaxonomyWriterCache"/> object determines the specific caching
+    /// if any. A <see cref="ITaxonomyWriterCache"/> object determines the specific caching
     /// algorithm used.
     /// </para>
     /// <para>
     /// This class offers some hooks for extending classes to control the
-    /// <seealso cref="IndexWriter"/> instance that is used. See <seealso cref="#openIndexWriter"/>.
+    /// <see cref="IndexWriter"/> instance that is used. See <see cref="OpenIndexWriter"/>.
     /// 
     /// @lucene.experimental
     /// </para>
@@ -77,7 +77,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// Property name of user commit data that contains the index epoch. The epoch
         /// changes whenever the taxonomy is recreated (i.e. opened with
-        /// <seealso cref="OpenMode#CREATE"/>.
+        /// <see cref="OpenMode.CREATE"/>.
         /// <para>
         /// Applications should not use this property in their commit data because it
         /// will be overridden by this taxonomy writer.
@@ -107,12 +107,12 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// We call the cache "complete" if we know that every category in our
-        /// taxonomy is in the cache. When the cache is <B>not</B> complete, and
+        /// taxonomy is in the cache. When the cache is <b>not</b> complete, and
         /// we can't find a category in the cache, we still need to look for it
         /// in the on-disk index; Therefore when the cache is not complete, we
         /// need to open a "reader" to the taxonomy index.
         /// The cache becomes incomplete if it was never filled with the existing
-        /// categories, or if a put() to the cache ever returned true (meaning
+        /// categories, or if a Put() to the cache ever returned true (meaning
         /// that some of the cached data was cleared).
         /// </summary>
         private volatile bool cacheIsComplete;
@@ -121,7 +121,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         private volatile int nextID;
 
         /// <summary>
-        /// Reads the commit data from a Directory. </summary>
+        /// Reads the commit data from a <see cref="Store.Directory"/>. </summary>
         private static IDictionary<string, string> ReadCommitData(Directory dir)
         {
             SegmentInfos infos = new SegmentInfos();
@@ -136,9 +136,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// known that no other process nor thread is in fact currently accessing
         /// this taxonomy.
         /// <P>
-        /// This method is unnecessary if your <seealso cref="Directory"/> uses a
-        /// <seealso cref="NativeFSLockFactory"/> instead of the default
-        /// <seealso cref="SimpleFSLockFactory"/>. When the "native" lock is used, a lock
+        /// This method is unnecessary if your <see cref="Store.Directory"/> uses a
+        /// <see cref="NativeFSLockFactory"/> instead of the default
+        /// <see cref="SimpleFSLockFactory"/>. When the "native" lock is used, a lock
         /// does not stay behind forever when the process using it dies. 
         /// </summary>
         public static void Unlock(Directory directory)
@@ -150,28 +150,28 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// Construct a Taxonomy writer.
         /// </summary>
         /// <param name="directory">
-        ///    The <seealso cref="Directory"/> in which to store the taxonomy. Note that
+        ///    The <see cref="Store.Directory"/> in which to store the taxonomy. Note that
         ///    the taxonomy is written directly to that directory (not to a
         ///    subdirectory of it). </param>
         /// <param name="openMode">
-        ///    Specifies how to open a taxonomy for writing: <code>APPEND</code>
+        ///    Specifies how to open a taxonomy for writing: <see cref="OpenMode.APPEND"/>
         ///    means open an existing index for append (failing if the index does
-        ///    not yet exist). <code>CREATE</code> means create a new index (first
+        ///    not yet exist). <see cref="OpenMode.CREATE"/> means create a new index (first
         ///    deleting the old one if it already existed).
-        ///    <code>APPEND_OR_CREATE</code> appends to an existing index if there
+        ///    <see cref="OpenMode.CREATE_OR_APPEND"/> appends to an existing index if there
         ///    is one, otherwise it creates a new index. </param>
         /// <param name="cache">
-        ///    A <seealso cref="TaxonomyWriterCache"/> implementation which determines
+        ///    A <see cref="ITaxonomyWriterCache"/> implementation which determines
         ///    the in-memory caching policy. See for example
-        ///    <seealso cref="LruTaxonomyWriterCache"/> and <seealso cref="Cl2oTaxonomyWriterCache"/>.
-        ///    If null or missing, <seealso cref="#defaultTaxonomyWriterCache()"/> is used. </param>
+        ///    <see cref="WriterCache.LruTaxonomyWriterCache"/> and <see cref="Cl2oTaxonomyWriterCache"/>.
+        ///    If null or missing, <see cref="DefaultTaxonomyWriterCache()"/> is used. </param>
         /// <exception cref="CorruptIndexException">
         ///     if the taxonomy is corrupted. </exception>
         /// <exception cref="LockObtainFailedException">
         ///     if the taxonomy is locked by another writer. If it is known
         ///     that no other concurrent writer is active, the lock might
         ///     have been left around by an old dead process, and should be
-        ///     removed using <seealso cref="#unlock(Directory)"/>. </exception>
+        ///     removed using <see cref="Unlock(Directory)"/>. </exception>
         /// <exception cref="IOException">
         ///     if another error occurred. </exception>
         public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, 
@@ -250,18 +250,17 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// Open internal index writer, which contains the taxonomy data.
         /// <para>
-        /// Extensions may provide their own <seealso cref="IndexWriter"/> implementation or instance. 
-        /// <br><b>NOTE:</b> the instance this method returns will be closed upon calling
-        /// to <seealso cref="#close()"/>.
+        /// Extensions may provide their own <see cref="IndexWriter"/> implementation or instance. 
+        /// <br><b>NOTE:</b> the instance this method returns will be disposed upon calling
+        /// to <see cref="Dispose()"/>.
         /// <br><b>NOTE:</b> the merge policy in effect must not merge none adjacent segments. See
-        /// comment in <seealso cref="#createIndexWriterConfig(IndexWriterConfig.OpenMode)"/> for the logic behind this.
+        /// comment in <see cref="CreateIndexWriterConfig(IndexWriterConfig.OpenMode)"/> for the logic behind this.
         ///  
         /// </para>
         /// </summary>
-        /// <seealso cref= #createIndexWriterConfig(IndexWriterConfig.OpenMode)
-        /// </seealso>
+        /// <seealso cref="CreateIndexWriterConfig(IndexWriterConfig.OpenMode)"/>
         /// <param name="directory">
-        ///          the <seealso cref="Directory"/> on top of which an <seealso cref="IndexWriter"/>
+        ///          the <see cref="Store.Directory"/> on top of which an <see cref="IndexWriter"/>
         ///          should be opened. </param>
         /// <param name="config">
         ///          configuration for the internal index writer. </param>
@@ -271,18 +270,17 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Create the <seealso cref="IndexWriterConfig"/> that would be used for opening the internal index writer.
-        /// <br>Extensions can configure the <seealso cref="IndexWriter"/> as they see fit,
-        /// including setting a <seealso cref="Lucene.Net.index.MergeScheduler merge-scheduler"/>, or
-        /// <seealso cref="Lucene.Net.index.IndexDeletionPolicy deletion-policy"/>, different RAM size
+        /// Create the <see cref="IndexWriterConfig"/> that would be used for opening the internal index writer.
+        /// <br>Extensions can configure the <see cref="IndexWriter"/> as they see fit,
+        /// including setting a <see cref="Index.MergeScheduler"/>, or
+        /// <see cref="Index.IndexDeletionPolicy"/>, different RAM size
         /// etc.<br>
         /// <br><b>NOTE:</b> internal docids of the configured index must not be altered.
         /// For that, categories are never deleted from the taxonomy index.
         /// In addition, merge policy in effect must not merge none adjacent segments.
         /// </summary>
-        /// <seealso cref= #openIndexWriter(Directory, IndexWriterConfig)
-        /// </seealso>
-        /// <param name="openMode"> see <seealso cref="OpenMode"/> </param>
+        /// <seealso cref="OpenIndexWriter(Directory, IndexWriterConfig)"/>
+        /// <param name="openMode"> see <see cref="OpenMode"/> </param>
         protected virtual IndexWriterConfig CreateIndexWriterConfig(OpenMode openMode)
         {
             // TODO: should we use a more optimized Codec, e.g. Pulsing (or write custom)?
@@ -295,7 +293,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Opens a <seealso cref="ReaderManager"/> from the internal <seealso cref="IndexWriter"/>. 
+        /// Opens a <see cref="ReaderManager"/> from the internal <see cref="IndexWriter"/>. 
         /// </summary>
         private void InitReaderManager()
         {
@@ -317,7 +315,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Creates a new instance with a default cache as defined by
-        /// <seealso cref="#defaultTaxonomyWriterCache()"/>.
+        /// <see cref="DefaultTaxonomyWriterCache()"/>.
         /// </summary>
         public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode = OpenMode.CREATE_OR_APPEND)
             : this(directory, openMode, DefaultTaxonomyWriterCache())
@@ -325,12 +323,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Defines the default <seealso cref="TaxonomyWriterCache"/> to use in constructors
+        /// Defines the default <see cref="ITaxonomyWriterCache"/> to use in constructors
         /// which do not specify one.
-        /// <P>  
-        /// The current default is <seealso cref="Cl2oTaxonomyWriterCache"/> constructed
+        /// <para>  
+        /// The current default is <see cref="Cl2oTaxonomyWriterCache"/> constructed
         /// with the parameters (1024, 0.15f, 3), i.e., the entire taxonomy is
         /// cached in memory while building it.
+        /// </para>
         /// </summary>
         public static ITaxonomyWriterCache DefaultTaxonomyWriterCache()
         {
@@ -338,9 +337,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Frees used resources as well as closes the underlying <seealso cref="IndexWriter"/>,
+        /// Frees used resources as well as closes the underlying <see cref="IndexWriter"/>,
         /// which commits whatever changes made to it to the underlying
-        /// <seealso cref="Directory"/>.
+        /// <see cref="Store.Directory"/>.
         /// </summary>
         public void Dispose()
         {
@@ -363,10 +362,12 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// A hook for extending classes to close additional resources that were used.
-        /// The default implementation closes the <seealso cref="IndexReader"/> as well as the
-        /// <seealso cref="TaxonomyWriterCache"/> instances that were used. <br>
+        /// The default implementation closes the <see cref="Index.IndexReader"/> as well as the
+        /// <see cref="ITaxonomyWriterCache"/> instances that were used.
+        /// <para>
         /// <b>NOTE:</b> if you override this method, you should include a
-        /// <code>super.closeResources()</code> call in your implementation.
+        /// <c>base.CloseResources()</c> call in your implementation.
+        /// </para>
         /// </summary>
         protected virtual void CloseResources()
         {
@@ -529,7 +530,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Verifies that this instance wasn't closed, or throws
-        /// <seealso cref="AlreadyClosedException"/> if it is.
+        /// <see cref="AlreadyClosedException"/> if it is.
         /// </summary>
         protected internal void EnsureOpen()
         {
@@ -540,7 +541,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Note that the methods calling addCategoryDocument() are synchornized, so
+        /// Note that the methods calling <see cref="AddCategoryDocument"/> are synchornized, so
         /// this method is effectively synchronized as well.
         /// </summary>
         private int AddCategoryDocument(FacetLabel categoryPath, int parent)
@@ -598,15 +599,16 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             /// <summary>
             /// Set the value we want to keep, as the position increment.
-            /// Note that when TermPositions.nextPosition() is later used to
+            /// Note that when TermPositions.NextPosition() is later used to
             /// retrieve this value, val-1 will be returned, not val.
-            /// <P>
+            /// <para>
             /// IMPORTANT NOTE: Before Lucene 2.9, val>=0 were safe (for val==0,
             /// the retrieved position would be -1). But starting with Lucene 2.9,
             /// this unfortunately changed, and only val>0 are safe. val=0 can
             /// still be used, but don't count on the value you retrieve later
             /// (it could be 0 or -1, depending on circumstances or versions).
-            /// This change is described in Lucene's JIRA: LUCENE-1542. 
+            /// This change is described in Lucene's JIRA: LUCENE-1542.
+            /// </para>
             /// </summary>
             public virtual void Set(int val)
             {
@@ -679,7 +681,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Combine original user data with the taxonomy epoch. </summary>
+        /// Combine original user data with the taxonomy epoch.
+        /// </summary>
         private IDictionary<string, string> CombinedCommitData(IDictionary<string, string> commitData)
         {
             IDictionary<string, string> m = new Dictionary<string, string>();
@@ -706,7 +709,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// prepare most of the work needed for a two-phase commit.
-        /// See <seealso cref="IndexWriter#prepareCommit"/>.
+        /// See <see cref="IndexWriter.PrepareCommit"/>.
         /// </summary>
         public virtual void PrepareCommit()
         {
@@ -744,7 +747,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// after a certain number (defined by this method) of cache misses.
         /// </para>
         /// <para>
-        /// If the number is set to {@code 0}, the entire taxonomy is read into the
+        /// If the number is set to <c>0</c>, the entire taxonomy is read into the
         /// cache on first use, without fetching individual categories first.
         /// </para>
         /// <para>
@@ -894,7 +897,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// Takes the categories from the given taxonomy directory, and adds the
         /// missing ones to this taxonomy. Additionally, it fills the given
-        /// <seealso cref="OrdinalMap"/> with a mapping from the original ordinal to the new
+        /// <see cref="IOrdinalMap"/> with a mapping from the original ordinal to the new
         /// ordinal.
         /// </summary>
         public virtual void AddTaxonomy(Directory taxoDir, IOrdinalMap map)
@@ -935,13 +938,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// Mapping from old ordinal to new ordinals, used when merging indexes 
         /// wit separate taxonomies.
         /// <para> 
-        /// addToTaxonomies() merges one or more taxonomies into the given taxonomy
-        /// (this). An IOrdinalMap is filled for each of the added taxonomies,
+        /// <see cref="AddMapping"/> merges one or more taxonomies into the given taxonomy
+        /// (this). An <see cref="IOrdinalMap"/> is filled for each of the added taxonomies,
         /// containing the new ordinal (in the merged taxonomy) of each of the
         /// categories in the old taxonomy.
         /// <P>  
-        /// There exist two implementations of IOrdinalMap: MemoryOrdinalMap and
-        /// DiskOrdinalMap. As their names suggest, the former keeps the map in
+        /// There exist two implementations of <see cref="IOrdinalMap"/>: <see cref="MemoryOrdinalMap"/> and
+        /// <see cref="DiskOrdinalMap"/>. As their names suggest, the former keeps the map in
         /// memory and the latter in a temporary disk file. Because these maps will
         /// later be needed one by one (to remap the counting lists), not all at the
         /// same time, it is recommended to put the first taxonomy's map in memory,
@@ -952,10 +955,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         public interface IOrdinalMap
         {
             /// <summary>
-            /// Set the size of the map. This MUST be called before addMapping().
-            /// It is assumed (but not verified) that addMapping() will then be
-            /// called exactly 'size' times, with different origOrdinals between 0
-            /// and size-1.  
+            /// Set the size of the map. This MUST be called before <see cref="AddMapping"/>.
+            /// It is assumed (but not verified) that <see cref="AddMapping"/> will then be
+            /// called exactly 'size' times, with different <c>origOrdinal</c> values between 0
+            /// and size - 1.  
             /// </summary>
             void SetSize(int taxonomySize);
 
@@ -964,7 +967,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             void AddMapping(int origOrdinal, int newOrdinal);
 
             /// <summary>
-            /// Call addDone() to say that all addMapping() have been done.
+            /// Call <see cref="AddDone()"/> to say that all <see cref="AddMapping"/> calls have been made.
             /// In some implementations this might free some resources. 
             /// </summary>
             void AddDone();
@@ -981,7 +984,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// <seealso cref="OrdinalMap"/> maintained in memory
+        /// <see cref="IOrdinalMap"/> maintained in memory
         /// </summary>
         public sealed class MemoryOrdinalMap : IOrdinalMap
         {
@@ -1026,7 +1029,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// <seealso cref="OrdinalMap"/> maintained on file system
+        /// <see cref="IOrdinalMap"/> maintained on file system
         /// </summary>
         public sealed class DiskOrdinalMap : IOrdinalMap
         {
@@ -1102,7 +1105,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// Rollback changes to the taxonomy writer and closes the instance. Following
         /// this method the instance becomes unusable (calling any of its API methods
-        /// will yield an <seealso cref="AlreadyClosedException"/>).
+        /// will yield an <see cref="AlreadyClosedException"/>).
         /// </summary>
         public virtual void Rollback()
         {
@@ -1117,7 +1120,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// <summary>
         /// Replaces the current taxonomy with the given one. This method should
         /// generally be called in conjunction with
-        /// <seealso cref="IndexWriter#addIndexes(Directory...)"/> to replace both the taxonomy
+        /// <see cref="IndexWriter.AddIndexes(Directory[])"/> to replace both the taxonomy
         /// as well as the search index content.
         /// </summary>
         public virtual void ReplaceTaxonomy(Directory taxoDir)
@@ -1146,7 +1149,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Returns the <seealso cref="Directory"/> of this taxonomy writer. </summary>
+        /// Returns the <see cref="Store.Directory"/> of this taxonomy writer.
+        /// </summary>
         public virtual Directory Directory
         {
             get
@@ -1156,9 +1160,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Used by <seealso cref="DirectoryTaxonomyReader"/> to support NRT.
+        /// Used by <see cref="DirectoryTaxonomyReader"/> to support NRT.
         /// <para>
-        /// <b>NOTE:</b> you should not use the obtained <seealso cref="IndexWriter"/> in any
+        /// <b>NOTE:</b> you should not use the obtained <see cref="IndexWriter"/> in any
         /// way, other than opening an IndexReader on it, or otherwise, the taxonomy
         /// index may become corrupt!
         /// </para>
@@ -1173,8 +1177,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         /// <summary>
         /// Expert: returns current index epoch, if this is a
-        /// near-real-time reader.  Used by {@link
-        /// DirectoryTaxonomyReader} to support NRT. 
+        /// near-real-time reader.  Used by 
+        /// <see cref="DirectoryTaxonomyReader"/> to support NRT. 
         /// 
         /// @lucene.internal 
         /// </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
index 9cbaaed..a261374 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
@@ -28,7 +28,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     using MultiFields = Lucene.Net.Index.MultiFields;
 
     /// <summary>
-    /// A <seealso cref="ParallelTaxonomyArrays"/> that are initialized from the taxonomy
+    /// A <see cref="ParallelTaxonomyArrays"/> that are initialized from the taxonomy
     /// index.
     /// 
     /// @lucene.experimental
@@ -45,7 +45,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         private int[] children, siblings;
 
         /// <summary>
-        /// Used by <seealso cref="#add(int, int)"/> after the array grew. </summary>
+        /// Used by <see cref="Add(int, int)"/> after the array grew.
+        /// </summary>
         private TaxonomyIndexArrays(int[] parents)
         {
             this.parents = parents;
@@ -137,7 +138,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
         }
 
-        // Read the parents of the new categories
+        /// <summary>
+        /// Read the parents of the new categories
+        /// </summary>
         private void InitParents(IndexReader reader, int first)
         {
             if (reader.MaxDoc == first)
@@ -204,8 +207,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Returns the parents array, where {@code parents[i]} denotes the parent of
-        /// category ordinal {@code i}.
+        /// Returns the parents array, where <c>Parents[i]</c> denotes the parent of
+        /// category ordinal <c>i</c>.
         /// </summary>
         public override int[] Parents
         {
@@ -216,10 +219,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Returns the children array, where {@code children[i]} denotes the youngest
-        /// child of category ordinal {@code i}. The youngest child is defined as the
+        /// Returns the children array, where <c>Children[i]</c> denotes the youngest
+        /// child of category ordinal <c>i</c>. The youngest child is defined as the
         /// category that was added last to the taxonomy as an immediate child of
-        /// {@code i}.
+        /// <c>i</c>.
         /// </summary>
         public override int[] Children
         {
@@ -236,9 +239,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         }
 
         /// <summary>
-        /// Returns the siblings array, where {@code siblings[i]} denotes the sibling
-        /// of category ordinal {@code i}. The sibling is defined as the previous
-        /// youngest child of {@code parents[i]}.
+        /// Returns the siblings array, where <c>Siblings[i]</c> denotes the sibling
+        /// of category ordinal <c>i</c>. The sibling is defined as the previous
+        /// youngest child of <c>Parents[i]</c>.
         /// </summary>
         public override int[] Siblings
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
index 3d5cc53..b48eafd 100644
--- a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
@@ -25,21 +25,23 @@
     using IntsRef = Lucene.Net.Util.IntsRef;
 
     /// <summary>
-    /// Decodes ordinals previously indexed into a BinaryDocValues field </summary>
-
+    /// Decodes ordinals previously indexed into a <see cref="BinaryDocValues"/> field
+    /// </summary>
     public class DocValuesOrdinalsReader : OrdinalsReader
     {
         private readonly string field;
 
         /// <summary>
-        /// Default constructor. </summary>
+        /// Default constructor.
+        /// </summary>
         public DocValuesOrdinalsReader()
             : this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME)
         {
         }
 
         /// <summary>
-        /// Create this, with the specified indexed field name. </summary>
+        /// Create this, with the specified indexed field name.
+        /// </summary>
         public DocValuesOrdinalsReader(string field)
         {
             this.field = field;
@@ -87,7 +89,8 @@
         }
 
         /// <summary>
-        /// Subclass & override if you change the encoding. </summary>
+        /// Subclass &amp; override if you change the encoding.
+        /// </summary>
         protected virtual void Decode(BytesRef buf, IntsRef ordinals)
         {
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index edc68c0..56a16af 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -40,20 +40,21 @@ namespace Lucene.Net.Facet.Taxonomy
          * be on the safe side.
          */
         /// <summary>
-        /// The maximum number of characters a <seealso cref="FacetLabel"/> can have.
+        /// The maximum number of characters a <see cref="FacetLabel"/> can have.
         /// </summary>
         public static readonly int MAX_CATEGORY_PATH_LENGTH = (BYTE_BLOCK_SIZE - 2) / 4;
 
         /// <summary>
-        /// The components of this <seealso cref="FacetLabel"/>. Note that this array may be
-        /// shared with other <seealso cref="FacetLabel"/> instances, e.g. as a result of
-        /// <seealso cref="#subpath(int)"/>, therefore you should traverse the array up to
-        /// <seealso cref="#length"/> for this path's components.
+        /// The components of this <see cref="FacetLabel"/>. Note that this array may be
+        /// shared with other <see cref="FacetLabel"/> instances, e.g. as a result of
+        /// <see cref="Subpath(int)"/>, therefore you should traverse the array up to
+        /// <see cref="Length"/> for this path's components.
         /// </summary>
         public string[] Components { get; private set; }
 
         /// <summary>
-        /// The number of components of this <seealso cref="FacetLabel"/>. </summary>
+        /// The number of components of this <see cref="FacetLabel"/>.
+        /// </summary>
         public int Length { get; private set; }
 
         // Used by subpath
@@ -68,7 +69,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Construct from the given path components. </summary>
+        /// Construct from the given path components.
+        /// </summary>
         public FacetLabel(params string[] components)
         {
             this.Components = components;
@@ -77,7 +79,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Construct from the dimension plus the given path components. </summary>
+        /// Construct from the dimension plus the given path components.
+        /// </summary>
         public FacetLabel(string dim, string[] path)
         {
             Components = new string[1 + path.Length];
@@ -106,7 +109,7 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Compares this path with another <seealso cref="FacetLabel"/> for lexicographic
+        /// Compares this path with another <see cref="FacetLabel"/> for lexicographic
         /// order.
         /// </summary>
         public virtual int CompareTo(FacetLabel other)
@@ -171,10 +174,9 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Calculate a 64-bit hash function for this path.  This
-        ///  is necessary for <seealso cref="NameHashIntCacheLRU"/> (the
-        ///  default cache impl for {@link
-        ///  LruTaxonomyWriterCache}) to reduce the chance of
-        ///  "silent but deadly" collisions. 
+        /// is necessary for <see cref="NameHashIntCacheLRU"/> (the
+        /// default cache impl for <see cref="LruTaxonomyWriterCache"/>) 
+        /// to reduce the chance of "silent but deadly" collisions. 
         /// </summary>
         public virtual long LongHashCode()
         {
@@ -192,16 +194,17 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Returns a sub-path of this path up to {@code length} components. </summary>
-        public virtual FacetLabel Subpath(int len)
+        /// Returns a sub-path of this path up to <paramref name="length"/> components.
+        /// </summary>
+        public virtual FacetLabel Subpath(int length)
         {
-            if (len >= this.Length || len < 0)
+            if (length >= this.Length || length < 0)
             {
                 return this;
             }
             else
             {
-                return new FacetLabel(this, len);
+                return new FacetLabel(this, length);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
index 3511af8..6cb5043 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
@@ -26,15 +26,15 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Computes facets counts, assuming the default encoding
-    ///  into DocValues was used.
+    /// into DocValues was used.
     /// 
     /// @lucene.experimental 
     /// </summary>
     public class FastTaxonomyFacetCounts : IntTaxonomyFacets
     {
         /// <summary>
-        /// Create {@code FastTaxonomyFacetCounts}, which also
-        ///  counts all facet labels. 
+        /// Create <see cref="FastTaxonomyFacetCounts"/>, which also
+        /// counts all facet labels. 
         /// </summary>
         public FastTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, taxoReader, config, fc)
@@ -42,11 +42,11 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Create {@code FastTaxonomyFacetCounts}, using the
-        ///  specified {@code indexFieldName} for ordinals.  Use
-        ///  this if you had set {@link
-        ///  FacetsConfig#setIndexFieldName} to change the index
-        ///  field name for certain dimensions. 
+        /// Create <see cref="FastTaxonomyFacetCounts"/>, using the
+        /// specified <paramref name="indexFieldName"/> for ordinals.  Use
+        /// this if you had set <see cref="FacetsConfig.SetIndexFieldName"/>
+        /// to change the index
+        /// field name for certain dimensions. 
         /// </summary>
         public FastTaxonomyFacetCounts(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(indexFieldName, taxoReader, config)
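
A sketch of the counting flow this class is built for; searcher, query, taxoReader and config are assumed to exist, and FacetsCollector.Search is the facet module's collection helper:

    FacetsCollector fc = new FacetsCollector();
    FacetsCollector.Search(searcher, query, 10, fc);         // run the query, recording matching docs

    Facets counts = new FastTaxonomyFacetCounts(taxoReader, config, fc);
    FacetResult top = counts.GetTopChildren(10, "Author");   // top 10 labels under the "Author" dimension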

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index 1b070a6..dbcf615 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -24,18 +24,17 @@ namespace Lucene.Net.Facet.Taxonomy
     using Document = Lucene.Net.Documents.Document;
 
     /// <summary>
-    /// Add an instance of this to your <seealso cref="Document"/> to add
-    ///  a facet label associated with a float.  Use {@link
-    ///  TaxonomyFacetSumFloatAssociations} to aggregate float values
-    ///  per facet label at search time.
+    /// Add an instance of this to your <see cref="Document"/> to add
+    /// a facet label associated with a float.  Use <see cref="TaxonomyFacetSumFloatAssociations"/>
+    /// to aggregate float values per facet label at search time.
     /// 
     ///  @lucene.experimental 
     /// </summary>
     public class FloatAssociationFacetField : AssociationFacetField
     {
         /// <summary>
-        /// Creates this from {@code dim} and {@code path} and a
-        ///  float association 
+        /// Creates this from <paramref name="dim"/> and <paramref name="path"/> and a
+        /// float association 
         /// </summary>
         public FloatAssociationFacetField(float assoc, string dim, params string[] path) 
             : base(FloatToBytesRef(assoc), dim, path)
@@ -43,14 +42,16 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Encodes a {@code float} as a 4-byte <seealso cref="BytesRef"/>. </summary>
+        /// Encodes a <see cref="float"/> as a 4-byte <see cref="BytesRef"/>.
+        /// </summary>
         public static BytesRef FloatToBytesRef(float v)
         {
             return IntAssociationFacetField.IntToBytesRef(Number.FloatToIntBits(v));
         }
 
         /// <summary>
-        /// Decodes a previously encoded {@code float}. </summary>
+        /// Decodes a previously encoded <see cref="float"/>.
+        /// </summary>
         public static float BytesRefToFloat(BytesRef b)
         {
             return Number.IntBitsToFloat(IntAssociationFacetField.BytesRefToInt(b));
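
A small sketch of attaching a float association to a document and of the encode/decode round trip; the dimension and values are made up, and the document is assumed to be indexed through FacetsConfig.Build as usual:

    var doc = new Document();
    doc.Add(new FloatAssociationFacetField(0.85f, "genre", "fiction"));   // one float per label

    BytesRef encoded = FloatAssociationFacetField.FloatToBytesRef(0.85f);
    float decoded = FloatAssociationFacetField.BytesRefToFloat(encoded);  // 0.85f again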

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index 27c8eff..70dffb4 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Base class for all taxonomy-based facets that aggregate
-    ///  to a per-ords float[]. 
+    /// to a per-ords <see cref="float[]"/>. 
     /// </summary>
     public abstract class FloatTaxonomyFacets : TaxonomyFacets
     {
@@ -34,7 +34,8 @@ namespace Lucene.Net.Facet.Taxonomy
         protected readonly float[] values;
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         protected internal FloatTaxonomyFacets(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config)
             : base(indexFieldName, taxoReader, config)
         {
@@ -42,7 +43,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Rolls up any single-valued hierarchical dimensions. </summary>
+        /// Rolls up any single-valued hierarchical dimensions.
+        /// </summary>
         protected virtual void Rollup()
         {
             // Rollup any necessary dims:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
index e7b56d7..3cdf528 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
@@ -23,18 +23,17 @@ namespace Lucene.Net.Facet.Taxonomy
     using Document = Lucene.Net.Documents.Document;
 
     /// <summary>
-    /// Add an instance of this to your <seealso cref="Document"/> to add
-    ///  a facet label associated with an int.  Use {@link
-    ///  TaxonomyFacetSumIntAssociations} to aggregate int values
-    ///  per facet label at search time.
+    /// Add an instance of this to your <see cref="Document"/> to add
+    /// a facet label associated with an int.  Use <see cref="TaxonomyFacetSumIntAssociations"/>
+    /// to aggregate int values per facet label at search time.
     /// 
     ///  @lucene.experimental 
     /// </summary>
     public class IntAssociationFacetField : AssociationFacetField
     {
         /// <summary>
-        /// Creates this from {@code dim} and {@code path} and an
-        ///  int association 
+        /// Creates this from <paramref name="dim"/> and <paramref name="path"/> and an
+        /// int association 
         /// </summary>
         public IntAssociationFacetField(int assoc, string dim, params string[] path)
             : base(IntToBytesRef(assoc), dim, path)
@@ -42,8 +41,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Encodes an {@code int} as a 4-byte <seealso cref="BytesRef"/>,
-        ///  big-endian. 
+        /// Encodes an <see cref="int"/> as a 4-byte <see cref="BytesRef"/>,
+        /// big-endian. 
         /// </summary>
         public static BytesRef IntToBytesRef(int v)
         {
@@ -58,7 +57,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Decodes a previously encoded {@code int}. </summary>
+        /// Decodes a previously encoded <see cref="int"/>.
+        /// </summary>
         public static int BytesRefToInt(BytesRef b)
         {
             return ((b.Bytes[b.Offset] & 0xFF) << 24) | ((b.Bytes[b.Offset + 1] & 0xFF) << 16) | 
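
The int variant follows the same pattern; because of the big-endian layout above, the round trip below recovers the original value:

    BytesRef b = IntAssociationFacetField.IntToBytesRef(2016);
    int back = IntAssociationFacetField.BytesRefToInt(b);   // 2016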

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index 2c4e533..60b59c9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Base class for all taxonomy-based facets that aggregate
-    ///  to a per-ords int[]. 
+    /// to a per-ords <see cref="int[]"/>. 
     /// </summary>
 
     public abstract class IntTaxonomyFacets : TaxonomyFacets
@@ -34,7 +34,8 @@ namespace Lucene.Net.Facet.Taxonomy
         protected internal readonly int[] values;
 
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         protected internal IntTaxonomyFacets(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config)
             : base(indexFieldName, taxoReader, config)
         {
@@ -42,7 +43,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Rolls up any single-valued hierarchical dimensions. </summary>
+        /// Rolls up any single-valued hierarchical dimensions.
+        /// </summary>
         protected virtual void Rollup()
         {
             // Rollup any necessary dims:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index 059f834..5ff77fb 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -22,33 +22,17 @@ namespace Lucene.Net.Facet.Taxonomy
      */
 
     /// <summary>
-    /// LRUHashMap is an extension of Java's HashMap, which has a bounded size();
-    /// When it reaches that size, each time a new element is added, the least
+    /// <see cref="LRUHashMap{TKey, TValue}"/> is similar to Java's HashMap, but has a bounded <see cref="Capacity"/>;
+    /// when it reaches that <see cref="Capacity"/>, each time a new element is added, the least
     /// recently used (LRU) entry is removed.
     /// <para>
-    /// Java makes it very easy to implement LRUHashMap - all its functionality is
-    /// already available from <seealso cref="java.util.LinkedHashMap"/>, and we just need to
-    /// configure that properly.
+    /// Unlike the Java Lucene implementation, this one is thread safe. Do note
+    /// that every time an element is read from <see cref="LRUHashMap{TKey, TValue}"/>,
+    /// a write operation also takes place to update the element's last access time.
+    /// This is because the LRU order needs to be remembered to determine which element
+    /// to evict when the <see cref="Capacity"/> is exceeded. 
     /// </para>
     /// <para>
-    /// Note that like HashMap, LRUHashMap is unsynchronized, and the user MUST
-    /// synchronize the access to it if used from several threads. Moreover, while
-    /// with HashMap this is only a concern if one of the threads is modifies the
-    /// map, with LURHashMap every read is a modification (because the LRU order
-    /// needs to be remembered) so proper synchronization is always necessary.
-    /// </para>
-    /// <para>
-    /// With the usual synchronization mechanisms available to the user, this
-    /// unfortunately means that LRUHashMap will probably perform sub-optimally under
-    /// heavy contention: while one thread uses the hash table (reads or writes), any
-    /// other thread will be blocked from using it - or even just starting to use it
-    /// (e.g., calculating the hash function). A more efficient approach would be not
-    /// to use LinkedHashMap at all, but rather to use a non-locking (as much as
-    /// possible) thread-safe solution, something along the lines of
-    /// java.util.concurrent.ConcurrentHashMap (though that particular class does not
-    /// support the additional LRU semantics, which will need to be added separately
-    /// using a concurrent linked list or additional storage of timestamps (in an
-    /// array or inside the entry objects), or whatever).
     /// 
     /// @lucene.experimental
     /// </para>
@@ -74,6 +58,17 @@ namespace Lucene.Net.Facet.Taxonomy
             this.cache = new Dictionary<TKey, CacheDataObject>(capacity);
         }
 
+        /// <summary>
+        /// Allows changing the map's maximal number of elements,
+        /// which was defined at construction time.
+        /// <para>
+        /// Note that if the map is already larger than the new <see cref="Capacity"/>, the current
+        /// implementation does not shrink it (by removing the oldest elements);
+        /// rather, the map remains at its current size as new elements are
+        /// added, and will only start shrinking (until settling again on the
+        /// given <see cref="Capacity"/>) if existing elements are explicitly deleted.
+        /// </para>
+        /// </summary>
         public virtual int Capacity
         {
             get { return capacity; }
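
A sketch of the intended use, assuming the Put/TryGetValue members that DirectoryTaxonomyReader relies on elsewhere in this changeset:

    // cache FacetLabel lookups by ordinal; past 1000 entries the LRU entry is evicted
    var cache = new LRUHashMap<int, FacetLabel>(1000);
    cache.Put(17, new FacetLabel("Author", "Mark Twain"));

    FacetLabel cached;
    if (cache.TryGetValue(17, out cached))
    {
        // hit; note that the read itself refreshed the entry's last-access time
    }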

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
index c8e4843..3223b6a 100644
--- a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
@@ -23,16 +23,16 @@
     /// <summary>
     /// Provides per-document ordinals. 
     /// </summary>
-
     public abstract class OrdinalsReader
     {
         /// <summary>
-        /// Returns ordinals for documents in one segment. </summary>
+        /// Returns ordinals for documents in one segment.
+        /// </summary>
         public abstract class OrdinalsSegmentReader
         {
             /// <summary>
-            /// Get the ordinals for this document.  ordinals.offset
-            ///  must always be 0! 
+            /// Get the ordinals for this document. The <paramref name="ordinals"/>.<see cref="IntsRef.Offset"/>
+            /// must always be 0! 
             /// </summary>
             public abstract void Get(int doc, IntsRef ordinals);
 
@@ -57,8 +57,8 @@
         public abstract OrdinalsSegmentReader GetReader(AtomicReaderContext context);
 
         /// <summary>
-        /// Returns the indexed field name this {@code
-        ///  OrdinalsReader} is reading from. 
+        /// Returns the indexed field name this <see cref="OrdinalsReader"/>
+        /// is reading from. 
         /// </summary>
         public abstract string IndexFieldName { get; }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
index 1b51568..297c5c2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
@@ -19,18 +19,18 @@
 
     /// <summary>
     /// Returns 3 arrays for traversing the taxonomy:
-    /// <ul>
-    /// <li>{@code parents}: {@code parents[i]} denotes the parent of category
-    /// ordinal {@code i}.</li>
-    /// <li>{@code children}: {@code children[i]} denotes a child of category ordinal
-    /// {@code i}.</li>
-    /// <li>{@code siblings}: {@code siblings[i]} denotes the sibling of category
-    /// ordinal {@code i}.</li>
-    /// </ul>
+    /// <list type="bullet">
+    /// <item> <see cref="Parents"/>: <c>Parents[i]</c> denotes the parent of category
+    /// ordinal <c>i</c>.</item>
+    /// <item> <see cref="Children"/>: <c>Children[i]</c> denotes a child of category ordinal
+    /// <c>i</c>.</item>
+    /// <item> <see cref="Siblings"/>: <c>Siblings[i]</c> denotes the sibling of category
+    /// ordinal <c>i</c>.</item>
+    /// </list>
     /// 
-    /// To traverse the taxonomy tree, you typically start with {@code children[0]}
+    /// To traverse the taxonomy tree, you typically start with <c>Children[0]</c>
     /// (ordinal 0 is reserved for ROOT), and then, depending on whether you want to do DFS or
-    /// BFS, you call {@code children[children[0]]} or {@code siblings[children[0]]}
+    /// BFS, you call <c>Children[Children[0]]</c> or <c>Siblings[Children[0]]</c>
     /// and so forth, respectively.
     /// 
     /// <para>
@@ -43,26 +43,27 @@
     public abstract class ParallelTaxonomyArrays
     {
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public ParallelTaxonomyArrays()
         {
         }
 
         /// <summary>
-        /// Returns the parents array, where {@code parents[i]} denotes the parent of
-        /// category ordinal {@code i}.
+        /// Returns the parents array, where <c>Parents[i]</c> denotes the parent of
+        /// category ordinal <c>i</c>.
         /// </summary>
         public abstract int[] Parents { get; }
 
         /// <summary>
-        /// Returns the children array, where {@code children[i]} denotes a child of
-        /// category ordinal {@code i}.
+        /// Returns the children array, where <c>Children[i]</c> denotes a child of
+        /// category ordinal <c>i</c>.
         /// </summary>
         public abstract int[] Children { get; }
 
         /// <summary>
-        /// Returns the siblings array, where {@code siblings[i]} denotes the sibling
-        /// of category ordinal {@code i}.
+        /// Returns the siblings array, where <c>Siblings[i]</c> denotes the sibling
+        /// of category ordinal <c>i</c>.
         /// </summary>
         public abstract int[] Siblings { get; }
     }
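
A sketch of the sibling-chain walk described above, printing the root's immediate children; the ParallelTaxonomyArrays property on the taxonomy reader is an assumption about the reader's API, the rest follows the arrays directly:

    ParallelTaxonomyArrays arrays = taxoReader.ParallelTaxonomyArrays;   // assumed property name
    int[] children = arrays.Children;
    int[] siblings = arrays.Siblings;

    // start at the root's youngest child and follow the sibling chain;
    // TaxonomyReader.INVALID_ORDINAL (-1) marks the end of the chain
    for (int ord = children[TaxonomyReader.ROOT_ORDINAL]; ord != TaxonomyReader.INVALID_ORDINAL; ord = siblings[ord])
    {
        Console.WriteLine(taxoReader.GetPath(ord));
    }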

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
index 601e5d8..1f94b86 100644
--- a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
@@ -26,19 +26,22 @@ namespace Lucene.Net.Facet.Taxonomy
     using FSDirectory = Lucene.Net.Store.FSDirectory;
 
     /// <summary>
-    /// Prints how many ords are under each dimension. </summary>
+    /// Prints how many ords are under each dimension. 
+    /// </summary>
 
     // java -cp ../build/core/classes/java:../build/facet/classes/java org.apache.lucene.facet.util.PrintTaxonomyStats -printTree /s2/scratch/indices/wikibig.trunk.noparents.facets.Lucene41.nd1M/facets
     public class PrintTaxonomyStats
     {
         /// <summary>
-        /// Sole constructor. </summary>
+        /// Sole constructor.
+        /// </summary>
         public PrintTaxonomyStats()
         {
         }
 
         /// <summary>
-        /// Command-line tool. </summary>
+        /// Command-line tool.
+        /// </summary>
         public static void Main(string[] args)
         {
             bool printTree = false;
@@ -56,6 +59,7 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             if (args.Length != (printTree ? 2 : 1))
             {
+                // LUCENENET TODO: Usage depends on wrapping this into a console application assembly.
                 Console.WriteLine("\nUsage: java -classpath ... org.apache.lucene.facet.util.PrintTaxonomyStats [-printTree] /path/to/taxononmy/index\n");
                 Environment.Exit(1);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index 2c8edb3..8fb75d8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -32,32 +32,35 @@ namespace Lucene.Net.Facet.Taxonomy
     using SearcherManager = Lucene.Net.Search.SearcherManager;
 
     /// <summary>
-    /// Manages near-real-time reopen of both an IndexSearcher
-    /// and a TaxonomyReader.
+    /// Manages near-real-time reopen of both an <see cref="IndexSearcher"/>
+    /// and a <see cref="TaxonomyReader"/>.
     /// 
-    /// <para><b>NOTE</b>: If you call {@link
-    /// DirectoryTaxonomyWriter#replaceTaxonomy} then you must
-    /// open a new {@code SearcherTaxonomyManager} afterwards.
+    /// <para>
+    /// <b>NOTE</b>: If you call <see cref="DirectoryTaxonomyWriter.ReplaceTaxonomy"/>
+    /// then you must open a new <see cref="SearcherTaxonomyManager"/> afterwards.
     /// </para>
     /// </summary>
     public class SearcherTaxonomyManager : ReferenceManager<SearcherTaxonomyManager.SearcherAndTaxonomy>
     {
         /// <summary>
-        /// Holds a matched pair of <seealso cref="IndexSearcher"/> and
-        ///  <seealso cref="Taxonomy.TaxonomyReader"/> 
+        /// Holds a matched pair of <see cref="IndexSearcher"/> and
+        /// <see cref="Taxonomy.TaxonomyReader"/> 
         /// </summary>
         public class SearcherAndTaxonomy
         {
             /// <summary>
-            /// Point-in-time <seealso cref="IndexSearcher"/>. </summary>
+            /// Point-in-time <see cref="IndexSearcher"/>.
+            /// </summary>
             public IndexSearcher Searcher { get; private set; }
 
             /// <summary>
-            /// Matching point-in-time <seealso cref="DirectoryTaxonomyReader"/>. </summary>
+            /// Matching point-in-time <see cref="DirectoryTaxonomyReader"/>.
+            /// </summary>
             public DirectoryTaxonomyReader TaxonomyReader { get; private set; }
 
             /// <summary>
-            /// Create a SearcherAndTaxonomy </summary>
+            /// Create a <see cref="SearcherAndTaxonomy"/>
+            /// </summary>
             public SearcherAndTaxonomy(IndexSearcher searcher, DirectoryTaxonomyReader taxonomyReader)
             {
                 this.Searcher = searcher;
@@ -71,7 +74,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Creates near-real-time searcher and taxonomy reader
-        ///  from the corresponding writers. 
+        /// from the corresponding writers. 
         /// </summary>
         public SearcherTaxonomyManager(IndexWriter writer, bool applyAllDeletes, 
             SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter)
@@ -93,8 +96,8 @@ namespace Lucene.Net.Facet.Taxonomy
         /// 
         /// <para>
         /// <b>NOTE:</b> you should only use this constructor if you commit and call
-        /// <seealso cref="#maybeRefresh()"/> in the same thread. Otherwise it could lead to an
-        /// unsync'd <seealso cref="IndexSearcher"/> and <seealso cref="TaxonomyReader"/> pair.
+        /// <see cref="Index.ReaderManager.MaybeRefresh()"/> in the same thread. Otherwise it could lead to an
+        /// unsync'd <see cref="IndexSearcher"/> and <see cref="TaxonomyReader"/> pair.
         /// </para>
         /// </summary>
         public SearcherTaxonomyManager(Store.Directory indexDir, Store.Directory taxoDir, SearcherFactory searcherFactory)
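
A sketch of the acquire/release cycle the manager standardizes; indexWriter, taxoWriter and query are placeholders, and Acquire/Release/MaybeRefresh come from the ReferenceManager base class:

    var mgr = new SearcherTaxonomyManager(indexWriter, true, new SearcherFactory(), taxoWriter);

    SearcherTaxonomyManager.SearcherAndTaxonomy pair = mgr.Acquire();   // matched searcher + taxonomy reader
    try
    {
        TopDocs hits = pair.Searcher.Search(query, 10);
        // use pair.TaxonomyReader for facet counting against the same point in time
    }
    finally
    {
        mgr.Release(pair);
    }

    // after further index + taxonomy writes:
    mgr.MaybeRefresh();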

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
index b86c924..aa4acac 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
@@ -25,9 +25,8 @@ namespace Lucene.Net.Facet.Taxonomy
     using MatchingDocs = FacetsCollector.MatchingDocs;
 
     /// <summary>
-    /// Reads from any <seealso cref="OrdinalsReader"/>; use {@link
-    ///  FastTaxonomyFacetCounts} if you are using the
-    ///  default encoding from <seealso cref="BinaryDocValues"/>.
+    /// Reads from any <see cref="OrdinalsReader"/>; use <see cref="FastTaxonomyFacetCounts"/>
+    /// if you are using the default encoding from <see cref="BinaryDocValues"/>.
     /// 
     /// @lucene.experimental 
     /// </summary>
@@ -36,10 +35,9 @@ namespace Lucene.Net.Facet.Taxonomy
         private readonly OrdinalsReader ordinalsReader;
 
         /// <summary>
-        /// Create {@code TaxonomyFacetCounts}, which also
-        ///  counts all facet labels.  Use this for a non-default
-        ///  <seealso cref="OrdinalsReader"/>; otherwise use {@link
-        ///  FastTaxonomyFacetCounts}. 
+        /// Create <see cref="TaxonomyFacetCounts"/>, which also
+        /// counts all facet labels.  Use this for a non-default
+        /// <see cref="OrdinalsReader"/>; otherwise use <see cref="FastTaxonomyFacetCounts"/>. 
         /// </summary>
         public TaxonomyFacetCounts(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(ordinalsReader.IndexFieldName, taxoReader, config)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
index e30cef2..242466d 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
@@ -27,16 +27,16 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Aggregates sum of float values previously indexed with
-    ///  <seealso cref="FloatAssociationFacetField"/>, assuming the default
-    ///  encoding.
+    /// <see cref="FloatAssociationFacetField"/>, assuming the default
+    /// encoding.
     /// 
     ///  @lucene.experimental 
     /// </summary>
     public class TaxonomyFacetSumFloatAssociations : FloatTaxonomyFacets
     {
         /// <summary>
-        /// Create {@code TaxonomyFacetSumFloatAssociations} against
-        ///  the default index field. 
+        /// Create <see cref="TaxonomyFacetSumFloatAssociations"/> against
+        /// the default index field. 
         /// </summary>
         public TaxonomyFacetSumFloatAssociations(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, taxoReader, config, fc)
@@ -44,8 +44,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Create {@code TaxonomyFacetSumFloatAssociations} against
-        ///  the specified index field. 
+        /// Create <see cref="TaxonomyFacetSumFloatAssociations"/> against
+        /// the specified index field. 
         /// </summary>
         public TaxonomyFacetSumFloatAssociations(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(indexFieldName, taxoReader, config)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
index 7ff982b..07b78b0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
@@ -26,16 +26,16 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// Aggregates sum of int values previously indexed with
-    ///  <seealso cref="IntAssociationFacetField"/>, assuming the default
-    ///  encoding.
+    /// <see cref="IntAssociationFacetField"/>, assuming the default
+    /// encoding.
     /// 
     ///  @lucene.experimental 
     /// </summary>
     public class TaxonomyFacetSumIntAssociations : IntTaxonomyFacets
     {
         /// <summary>
-        /// Create {@code TaxonomyFacetSumIntAssociations} against
-        ///  the default index field. 
+        /// Create <see cref="TaxonomyFacetSumIntAssociations"/> against
+        /// the default index field. 
         /// </summary>
         public TaxonomyFacetSumIntAssociations(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, taxoReader, config, fc)
@@ -43,8 +43,8 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Create {@code TaxonomyFacetSumIntAssociations} against
-        ///  the specified index field. 
+        /// Create <see cref="TaxonomyFacetSumIntAssociations"/> against
+        /// the specified index field. 
         /// </summary>
         public TaxonomyFacetSumIntAssociations(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(indexFieldName, taxoReader, config)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index ffe18df..c5e5cb5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -33,8 +33,8 @@ namespace Lucene.Net.Facet.Taxonomy
     using Weight = Lucene.Net.Search.Weight;
 
     /// <summary>
-    /// Aggregates sum of values from {@link
-    ///  FunctionValues#doubleVal}, for each facet label.
+    /// Aggregates sum of values from <see cref="FunctionValues.DoubleVal"/>, 
+    /// for each facet label.
     /// 
     ///  @lucene.experimental 
     /// </summary>
@@ -44,10 +44,9 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Aggregates float facet values from the provided
-        ///  <seealso cref="ValueSource"/>, pulling ordinals using {@link
-        ///  DocValuesOrdinalsReader} against the default indexed
-        ///  facet field {@link
-        ///  FacetsConfig#DEFAULT_INDEX_FIELD_NAME}. 
+        /// <see cref="ValueSource"/>, pulling ordinals using <see cref="DocValuesOrdinalsReader"/>
+        /// against the default indexed
+        /// facet field <see cref="FacetsConfig.DEFAULT_INDEX_FIELD_NAME"/>. 
         /// </summary>
         public TaxonomyFacetSumValueSource(TaxonomyReader taxoReader, FacetsConfig config,
             FacetsCollector fc, ValueSource valueSource)
@@ -58,8 +57,8 @@ namespace Lucene.Net.Facet.Taxonomy
 
         /// <summary>
         /// Aggregates float facet values from the provided
-        ///  <seealso cref="ValueSource"/>, and pulls ordinals from the
-        ///  provided <seealso cref="OrdinalsReader"/>. 
+        /// <see cref="ValueSource"/>, and pulls ordinals from the
+        /// provided <see cref="OrdinalsReader"/>. 
         /// </summary>
         public TaxonomyFacetSumValueSource(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader,
             FacetsConfig config, FacetsCollector fc, ValueSource valueSource)
@@ -158,15 +157,16 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// <seealso cref="ValueSource"/> that returns the score for each
-        ///  hit; use this to aggregate the sum of all hit scores
-        ///  for each facet label.  
+        /// <see cref="ValueSource"/> that returns the score for each
+        /// hit; use this to aggregate the sum of all hit scores
+        /// for each facet label.  
         /// </summary>
         public class ScoreValueSource : ValueSource
         {
 
             /// <summary>
-            /// Sole constructor. </summary>
+            /// Sole constructor.
+            /// </summary>
             public ScoreValueSource()
             {
             }
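
A sketch pairing the class with its nested ScoreValueSource to sum hit scores per label; the bool FacetsCollector constructor argument (asking it to keep scores) is an assumption here:

    FacetsCollector fc = new FacetsCollector(true);   // assumed: true = keep per-hit scores
    FacetsCollector.Search(searcher, query, 10, fc);

    Facets facets = new TaxonomyFacetSumValueSource(
        taxoReader, config, fc, new TaxonomyFacetSumValueSource.ScoreValueSource());
    FacetResult result = facets.GetTopChildren(10, "Category");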

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index ef8dede..87ea6d0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -25,7 +25,8 @@ namespace Lucene.Net.Facet.Taxonomy
     using DimConfig = Lucene.Net.Facet.FacetsConfig.DimConfig; // javadocs
 
     /// <summary>
-    /// Base class for all taxonomy-based facets impls. </summary>
+    /// Base class for all taxonomy-based facets impls.
+    /// </summary>
     public abstract class TaxonomyFacets : Facets
     {
         private static readonly IComparer<FacetResult> BY_VALUE_THEN_DIM = new ComparatorAnonymousInnerClassHelper();
@@ -54,26 +55,29 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Index field name provided to the constructor. </summary>
+        /// Index field name provided to the constructor.
+        /// </summary>
         protected readonly string indexFieldName;
 
         /// <summary>
-        /// {@code TaxonomyReader} provided to the constructor. </summary>
+        /// <see cref="TaxonomyReader"/> provided to the constructor.
+        /// </summary>
         protected readonly TaxonomyReader taxoReader;
 
         /// <summary>
-        /// {@code FacetsConfig} provided to the constructor. </summary>
+        /// <see cref="FacetsConfig"/> provided to the constructor.
+        /// </summary>
         protected readonly FacetsConfig config;
 
         /// <summary>
         /// Maps parent ordinal to its child, or -1 if the parent
-        ///  is childless. 
+        /// is childless. 
         /// </summary>
         protected readonly int[] children;
 
         /// <summary>
         /// Maps an ordinal to its sibling, or -1 if there is no
-        ///  sibling. 
+        /// sibling. 
         /// </summary>
         protected readonly int[] siblings;
 
@@ -91,9 +95,9 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Throws {@code IllegalArgumentException} if the
-        ///  dimension is not recognized.  Otherwise, returns the
-        ///  <seealso cref="DimConfig"/> for this dimension. 
+        /// Throws <see cref="ArgumentException"/> if the
+        /// dimension is not recognized.  Otherwise, returns the
+        /// <see cref="DimConfig"/> for this dimension. 
         /// </summary>
         protected internal virtual DimConfig VerifyDim(string dim)
         {


[45/46] lucenenet git commit: Added CSharpTest.Net.Collections.LurchTable to our Support namespace and modified the DirectoryTaxonomyReader and NameIntCacheLRU to utilize it.

Posted by sy...@apache.org.
Added CSharpTest.Net.Collections.LurchTable to our Support namespace and modified the DirectoryTaxonomyReader and NameIntCacheLRU to utilize it.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/36cde063
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/36cde063
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/36cde063

Branch: refs/heads/master
Commit: 36cde0630890082c3dd23fd54820de9a87be1bc3
Parents: d866888
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Sep 26 01:36:34 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Tue Oct 4 01:50:24 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Lucene.Net.csproj           |    1 +
 src/Lucene.Net.Core/Support/LurchTable.cs       | 1670 ++++++++++++++++++
 .../Directory/DirectoryTaxonomyReader.cs        |   64 +-
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs     |  202 ++-
 .../WriterCache/LruTaxonomyWriterCache.cs       |    8 +-
 .../Taxonomy/WriterCache/NameIntCacheLRU.cs     |   60 +-
 .../Taxonomy/TestLRUHashMap.cs                  |    6 +
 7 files changed, 1859 insertions(+), 152 deletions(-)
----------------------------------------------------------------------
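
A hypothetical sketch of using the new LurchTable (Lucene.Net.Support) as a bounded LRU cache, based only on the constructors and ordering enum added below; it is not code from this commit, and ordinal/label are placeholders:

    // thread-safe dictionary with a hard limit: once 1000 entries exist,
    // adding another evicts the least recently accessed one
    var table = new LurchTable<int, FacetLabel>(LurchTableOrder.Access, 1000);

    table[ordinal] = label;                  // standard IDictionary<TKey, TValue> surface
    FacetLabel cached;
    if (table.TryGetValue(ordinal, out cached))
    {
        // the read also refreshes the entry's position under Access ordering
    }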


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Core/Lucene.Net.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Lucene.Net.csproj b/src/Lucene.Net.Core/Lucene.Net.csproj
index e06e704..f9a3e28 100644
--- a/src/Lucene.Net.Core/Lucene.Net.csproj
+++ b/src/Lucene.Net.Core/Lucene.Net.csproj
@@ -637,6 +637,7 @@
     <Compile Include="Support\IdentityWeakReference.cs" />
     <Compile Include="Support\IDictionaryExtensions.cs" />
     <Compile Include="Support\ListExtensions.cs" />
+    <Compile Include="Support\LurchTable.cs" />
     <Compile Include="Support\MathExtension.cs" />
     <Compile Include="Support\MemoryMappedFileByteBuffer.cs" />
     <Compile Include="Support\ObjectExtensions.cs" />

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Core/Support/LurchTable.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/LurchTable.cs b/src/Lucene.Net.Core/Support/LurchTable.cs
new file mode 100644
index 0000000..dcb8ca6
--- /dev/null
+++ b/src/Lucene.Net.Core/Support/LurchTable.cs
@@ -0,0 +1,1670 @@
+\ufeff#region Copyright 2012-2014 by Roger Knapp, Licensed under the Apache License, Version 2.0
+/* Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ // 2016-10-03: Modified from the original version by Shad Storhaug to 
+ //             allow read-write access to the Limit property
+#endregion
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Threading;
+
+namespace Lucene.Net.Support
+{
+    /// <summary>
+    /// Defines if and how items added to a LurchTable are linked together; this defines
+    /// the value returned from Peek/Dequeue as the oldest entry of the specified operation.
+    /// </summary>
+    public enum LurchTableOrder
+    {
+        /// <summary> No linking </summary>
+        None,
+        /// <summary> Linked in insertion order </summary>
+        Insertion,
+        /// <summary> Linked by most recently inserted or updated </summary>
+        Modified,
+        /// <summary> Linked by most recently inserted, updated, or fetched </summary>
+        Access,
+    }
+
+    /// <summary>
+    /// LurchTable stands for "Least Used Recently Concurrent Hash Table" and has definite
+    /// similarities to both the .NET 4 ConcurrentDictionary and Java's LinkedHashMap.
+    /// This gives you a thread-safe dictionary/hashtable that stores element ordering by
+    /// insertion, updates, or access.  In addition it can be configured to use a 'hard-limit'
+    /// count of items that will automatically 'pop' the oldest item in the collection.
+    /// </summary>
+    /// <typeparam name="TKey">The type of keys in the dictionary.</typeparam>
+    /// <typeparam name="TValue">The type of values in the dictionary.</typeparam>
+    public class LurchTable<TKey, TValue> : IDictionary<TKey, TValue>
+    {
+        /// <summary> Method signature for the ItemUpdated event </summary>
+        public delegate void ItemUpdatedMethod(KeyValuePair<TKey, TValue> previous, KeyValuePair<TKey, TValue> next);
+
+        /// <summary> Event raised after an item is removed from the collection </summary>
+        public event Action<KeyValuePair<TKey, TValue>> ItemRemoved;
+        /// <summary> Event raised after an item is updated in the collection </summary>
+        public event ItemUpdatedMethod ItemUpdated;
+        /// <summary> Event raised after an item is added to the collection </summary>
+        public event Action<KeyValuePair<TKey, TValue>> ItemAdded;
+
+        private const int OverAlloc = 128;
+        private const int FreeSlots = 32;
+
+        private readonly IEqualityComparer<TKey> _comparer;
+        private readonly int _hsize, _lsize;
+        private int _limit; // LUCENENET: Changed to read-write
+        private readonly int _allocSize, _shift, _shiftMask;
+        private readonly LurchTableOrder _ordering;
+        private readonly object[] _locks;
+        private readonly int[] _buckets;
+        private readonly FreeList[] _free;
+
+        private Entry[][] _entries;
+        private int _used, _count;
+        private int _allocNext, _freeVersion;
+
+        /// <summary>Creates a LurchTable that can store up to (capacity) items efficiently.</summary>
+        public LurchTable(int capacity)
+            : this(LurchTableOrder.None, int.MaxValue, capacity >> 1, capacity >> 4, capacity >> 8, EqualityComparer<TKey>.Default) { }
+
+        /// <summary>Creates a LurchTable that can store up to (capacity) items efficiently.</summary>
+        public LurchTable(int capacity, LurchTableOrder ordering)
+            : this(ordering, int.MaxValue, capacity >> 1, capacity >> 4, capacity >> 8, EqualityComparer<TKey>.Default) { }
+
+        /// <summary>Creates a LurchTable that can store up to (capacity) items efficiently.</summary>
+        public LurchTable(int capacity, LurchTableOrder ordering, IEqualityComparer<TKey> comparer)
+            : this(ordering, int.MaxValue, capacity >> 1, capacity >> 4, capacity >> 8, comparer) { }
+
+        /// <summary>Creates a LurchTable that orders items by (ordering) and removes items once the specified (limit) is reached.</summary>
+        public LurchTable(LurchTableOrder ordering, int limit)
+            : this(ordering, limit, limit >> 1, limit >> 4, limit >> 8, EqualityComparer<TKey>.Default) { }
+
+        /// <summary>Creates a LurchTable that orders items by (ordering) and removes items once the specified (limit) is reached.</summary>
+        public LurchTable(LurchTableOrder ordering, int limit, IEqualityComparer<TKey> comparer)
+            : this(ordering, limit, limit >> 1, limit >> 4, limit >> 8, comparer) { }
+
+        /// <summary>
+        /// Creates a LurchTable that orders items by (ordering) and removes items once the specified (limit) is reached.
+        /// </summary>
+        /// <param name="ordering">The type of linking for the items</param>
+        /// <param name="limit">The maximum allowable number of items, or int.MaxValue for unlimited</param>
+        /// <param name="hashSize">The number of hash buckets to use for the collection, usually 1/2 estimated capacity</param>
+        /// <param name="allocSize">The number of entries to allocate at a time, usually 1/16 estimated capacity</param>
+        /// <param name="lockSize">The number of concurrency locks to preallocate, usually 1/256 estimated capacity</param>
+        /// <param name="comparer">The element hash generator for keys</param>
+        public LurchTable(LurchTableOrder ordering, int limit, int hashSize, int allocSize, int lockSize, IEqualityComparer<TKey> comparer)
+        {
+            if (limit <= 0)
+                throw new ArgumentOutOfRangeException("limit");
+            if (ordering == LurchTableOrder.None && limit < int.MaxValue)
+                throw new ArgumentOutOfRangeException("ordering");
+
+            _limit = limit <= 0 ? int.MaxValue : limit;
+            _comparer = comparer;
+            _ordering = ordering;
+
+            allocSize = (int)Math.Min((long)allocSize + OverAlloc, 0x3fffffff);
+            //last power of 2 that is less than allocSize
+            for (_shift = 7; _shift < 24 && (1 << (_shift + 1)) < allocSize; _shift++) { }
+            _allocSize = 1 << _shift;
+            _shiftMask = _allocSize - 1;
+
+            _hsize = HashUtilities.SelectPrimeNumber(Math.Max(127, hashSize));
+            _buckets = new int[_hsize];
+
+            _lsize = HashUtilities.SelectPrimeNumber(lockSize);
+            _locks = new object[_lsize];
+            for (int i = 0; i < _lsize; i++)
+                _locks[i] = new object();
+
+            _free = new FreeList[FreeSlots];
+            Initialize();
+        }
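+
+        // Sizing sketch (illustrative, not prescriptive): the capacity-based constructors above
+        // derive the internal sizes from a single capacity estimate, roughly
+        // hashSize = capacity/2, allocSize = capacity/16 and lockSize = capacity/256, e.g.
+        //
+        //     // what new LurchTable<string, int>(10000) expands to:
+        //     var table = new LurchTable<string, int>(
+        //         LurchTableOrder.None, int.MaxValue,
+        //         10000 >> 1, 10000 >> 4, 10000 >> 8,
+        //         EqualityComparer<string>.Default);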
+
+        #region IDisposable Members
+
+        /// <summary>
+        /// Clears references to all objects and invalidates the collection
+        /// </summary>
+        public void Dispose()
+        {
+            _entries = null;
+            _used = _count = 0;
+        }
+
+        #endregion
+
+        /// <summary>
+        /// Gets the number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1"/>.
+        /// </summary>
+        public int Count { get { return _count; } }
+        /// <summary>
+        /// Retrieves the LurchTableOrder Ordering enumeration this instance was created with.
+        /// </summary>
+        public LurchTableOrder Ordering { get { return _ordering; } }
+        /// <summary>
+        /// Retrieves the key comparer being used by this instance.
+        /// </summary>
+        public IEqualityComparer<TKey> Comparer { get { return _comparer; } }
+        /// <summary>
+        /// Gets or sets the record limit allowed in this instance.
+        /// </summary>
+        public int Limit
+        {
+            get { return _limit; }
+            set { _limit = value; }
+        }
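+
+        // Behavior note for the read-write Limit (sketch; the 'cache' instance below is
+        // hypothetical): lowering Limit does not evict anything immediately. The value is only
+        // consulted on subsequent inserts, and each insert beyond the limit pops at most one of
+        // the oldest entries.
+        //
+        //     var cache = new LurchTable<int, string>(LurchTableOrder.Access, 128);
+        //     cache.Limit = 64;    // no eviction happens here
+        //     cache[200] = "x";    // an insert past the limit evicts a single oldest entry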
+
+        /// <summary>
+        /// WARNING: not thread-safe; reinitializes all internal structures. Use Clear() for a thread-safe
+        /// delete-all. If you have externally provided exclusive access, this method may be used to
+        /// clear the collection more efficiently.
+        /// </summary>
+        public void Initialize()
+        {
+            lock (this)
+            {
+                _freeVersion = _allocNext = 0;
+                _count = 0;
+                _used = 1;
+
+                Array.Clear(_buckets, 0, _hsize);
+                _entries = new[] { new Entry[_allocSize] };
+                for (int slot = 0; slot < FreeSlots; slot++)
+                {
+                    var index = Interlocked.CompareExchange(ref _used, _used + 1, _used);
+                    if (index != slot + 1)
+                        throw new LurchTableCorruptionException();
+
+                    _free[slot].Tail = index;
+                    _free[slot].Head = index;
+                }
+
+                if (_count != 0 || _used != FreeSlots + 1)
+                    throw new LurchTableCorruptionException();
+            }
+        }
+
+        #region IDictionary<TKey,TValue> Members
+
+        /// <summary>
+        /// Removes all items from the <see cref="T:System.Collections.Generic.ICollection`1"/>.
+        /// </summary>
+        public void Clear()
+        {
+            if (_entries == null) throw new ObjectDisposedException(GetType().Name);
+            foreach (var item in this)
+                Remove(item.Key);
+        }
+
+        /// <summary>
+        /// Determines whether the <see cref="T:System.Collections.Generic.IDictionary`2"/> contains an element with the specified key.
+        /// </summary>
+        public bool ContainsKey(TKey key)
+        {
+            if (_entries == null) throw new ObjectDisposedException(GetType().Name);
+            TValue value;
+            return TryGetValue(key, out value);
+        }
+
+        /// <summary>
+        /// Gets or sets the element with the specified key.
+        /// </summary>
+        public TValue this[TKey key]
+        {
+            set
+            {
+                var info = new AddInfo { Value = value, CanUpdate = true };
+                Insert(key, ref info);
+            }
+            get
+            {
+                TValue value;
+                if (!TryGetValue(key, out value))
+                    throw new ArgumentOutOfRangeException();
+                return value;
+            }
+        }
+
+        /// <summary>
+        /// Gets the value associated with the specified key.
+        /// </summary>
+        /// <returns>
+        /// true if the object that implements <see cref="T:System.Collections.Generic.IDictionary`2"/> contains an element with the specified key; otherwise, false.
+        /// </returns>
+        public bool TryGetValue(TKey key, out TValue value)
+        {
+            int hash = _comparer.GetHashCode(key) & int.MaxValue;
+            return InternalGetValue(hash, key, out value);
+        }
+
+        /// <summary>
+        /// Adds an element with the provided key and value to the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </summary>
+        public void Add(TKey key, TValue value)
+        {
+            var info = new AddInfo { Value = value };
+            if (InsertResult.Inserted != Insert(key, ref info))
+                throw new ArgumentOutOfRangeException();
+        }
+
+        /// <summary>
+        /// Removes the element with the specified key from the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </summary>
+        /// <returns>
+        /// true if the element is successfully removed; otherwise, false.  This method also returns false if <paramref name="key"/> was not found in the original <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </returns>
+        /// <param name="key">The key of the element to remove.</param>
+        public bool Remove(TKey key)
+        {
+            var del = new DelInfo();
+            return Delete(key, ref del);
+        }
+
+        #endregion
+
+        #region IDictionaryEx<TKey,TValue> Members
+
+        /// <summary>
+        /// Adds a key/value pair to the  <see cref="T:System.Collections.Generic.IDictionary`2"/> if the key does not already exist.
+        /// </summary>
+        /// <param name="key">The key of the element to add.</param>
+        /// <param name="value">The value to be added, if the key does not already exist.</param>
+        public TValue GetOrAdd(TKey key, TValue value)
+        {
+            var info = new AddInfo { Value = value, CanUpdate = false };
+            if (InsertResult.Exists == Insert(key, ref info))
+                return info.Value;
+            return value;
+        }
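+
+        // Usage sketch (illustrative; 'table' is hypothetical): GetOrAdd returns the existing
+        // value when the key is already present, otherwise it stores and returns the supplied value.
+        //
+        //     var table = new LurchTable<string, string>(LurchTableOrder.Insertion, 100);
+        //     var a = table.GetOrAdd("k", "first");    // adds and returns "first"
+        //     var b = table.GetOrAdd("k", "second");   // returns "first"; nothing is replaced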
+
+        /// <summary>
+        /// Adds an element with the provided key and value to the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </summary>
+        /// <param name="key">The object to use as the key of the element to add.</param>
+        /// <param name="value">The object to use as the value of the element to add.</param>
+        public bool TryAdd(TKey key, TValue value)
+        {
+            var info = new AddInfo { Value = value, CanUpdate = false };
+            return InsertResult.Inserted == Insert(key, ref info);
+        }
+
+        /// <summary>
+        /// Updates an element with the provided key to the value if it exists.
+        /// </summary>
+        /// <returns>Returns true if the key provided was found and updated to the value.</returns>
+        /// <param name="key">The object to use as the key of the element to update.</param>
+        /// <param name="value">The new value for the key if found.</param>
+        public bool TryUpdate(TKey key, TValue value)
+        {
+            var info = new UpdateInfo { Value = value };
+            return InsertResult.Updated == Insert(key, ref info);
+        }
+
+        /// <summary>
+        /// Updates an element with the provided key to the value if it exists.
+        /// </summary>
+        /// <returns>Returns true if the key provided was found and updated to the value.</returns>
+        /// <param name="key">The object to use as the key of the element to update.</param>
+        /// <param name="value">The new value for the key if found.</param>
+        /// <param name="comparisonValue">The value that is compared to the value of the element with key.</param>
+        public bool TryUpdate(TKey key, TValue value, TValue comparisonValue)
+        {
+            var info = new UpdateInfo(comparisonValue) { Value = value };
+            return InsertResult.Updated == Insert(key, ref info);
+        }
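+
+        // Compare-and-update sketch (illustrative; 'counters' is hypothetical): the update is
+        // applied only when the current value equals the supplied comparison value.
+        //
+        //     var counters = new LurchTable<string, int>(LurchTableOrder.Modified, 1000);
+        //     counters["hits"] = 1;
+        //     bool swapped = counters.TryUpdate("hits", 2, 1);   // true: 1 -> 2
+        //     bool stale   = counters.TryUpdate("hits", 5, 1);   // false: current value is 2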
+
+        /// <summary>
+        /// Removes the element with the specified key from the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </summary>
+        /// <returns>
+        /// true if the element is successfully removed; otherwise, false.  This method also returns false if <paramref name="key"/> was not found in the original <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </returns>
+        /// <param name="key">The key of the element to remove.</param>
+        /// <param name="value">The value that was removed.</param>
+        public bool TryRemove(TKey key, out TValue value)
+        {
+            var info = new DelInfo();
+            if (Delete(key, ref info))
+            {
+                value = info.Value;
+                return true;
+            }
+            value = default(TValue);
+            return false;
+        }
+
+        #endregion
+
+        #region IConcurrentDictionary<TKey,TValue> Members
+
+        /// <summary>
+        /// Adds a key/value pair to the  <see cref="T:System.Collections.Generic.IDictionary`2"/> if the key does not already exist.
+        /// </summary>
+        /// <param name="key">The key of the element to add.</param>
+        /// <param name="fnCreate">Constructs a new value for the key.</param>
+        public TValue GetOrAdd(TKey key, Converter<TKey, TValue> fnCreate)
+        {
+            var info = new Add2Info { Create = fnCreate };
+            Insert(key, ref info);
+            return info.Value;
+        }
+
+        /// <summary>
+        /// Adds a key/value pair to the <see cref="T:System.Collections.Generic.IDictionary`2"/> if the key does not already exist, 
+        /// or updates a key/value pair if the key already exists.
+        /// </summary>
+        public TValue AddOrUpdate(TKey key, TValue addValue, KeyValueUpdate<TKey, TValue> fnUpdate)
+        {
+            var info = new Add2Info(addValue) { Update = fnUpdate };
+            Insert(key, ref info);
+            return info.Value;
+        }
+
+        /// <summary>
+        /// Adds a key/value pair to the <see cref="T:System.Collections.Generic.IDictionary`2"/> if the key does not already exist, 
+        /// or updates a key/value pair if the key already exists.
+        /// </summary>
+        /// <remarks>
+        /// Adds or modifies an element with the provided key and value.  If the key does not exist in the collection,
+        /// the factory method fnCreate will be called to produce the new value, if the key exists, the converter method
+        /// fnUpdate will be called to create an updated value.
+        /// </remarks>
+        public TValue AddOrUpdate(TKey key, Converter<TKey, TValue> fnCreate, KeyValueUpdate<TKey, TValue> fnUpdate)
+        {
+            var info = new Add2Info { Create = fnCreate, Update = fnUpdate };
+            Insert(key, ref info);
+            return info.Value;
+        }
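+
+        // Counter sketch (illustrative; 'counts' is hypothetical): adds the key with an initial
+        // value when absent, otherwise applies the update delegate to the existing value.
+        //
+        //     var counts = new LurchTable<string, int>(LurchTableOrder.Modified, 1000);
+        //     counts.AddOrUpdate("requests", k => 1, (k, old) => old + 1);   // 1 on first call
+        //     counts.AddOrUpdate("requests", k => 1, (k, old) => old + 1);   // then 2, 3, ...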
+
+        /// <summary>
+        /// Adds, updates, or fetches a key/value pair from the dictionary via an implementation of the
+        /// <see cref="T:CSharpTest.Net.Collections.ICreateOrUpdateValue`2"/> interface.
+        /// </summary>
+        public bool AddOrUpdate<T>(TKey key, ref T createOrUpdateValue) where T : ICreateOrUpdateValue<TKey, TValue>
+        {
+            var result = Insert(key, ref createOrUpdateValue);
+            return result == InsertResult.Inserted || result == InsertResult.Updated;
+        }
+
+        /// <summary>
+        /// Adds an element with the provided key and value to the <see cref="T:System.Collections.Generic.IDictionary`2"/>
+        /// by calling the provided factory method to construct the value if the key is not already present in the collection.
+        /// </summary>
+        public bool TryAdd(TKey key, Converter<TKey, TValue> fnCreate)
+        {
+            var info = new Add2Info { Create = fnCreate };
+            return InsertResult.Inserted == Insert(key, ref info);
+        }
+
+        /// <summary>
+        /// Modifies the value associated with the specified key using the provided update method
+        /// as an atomic operation; allows reading/writing a single record within
+        /// the synchronization lock.
+        /// </summary>
+        public bool TryUpdate(TKey key, KeyValueUpdate<TKey, TValue> fnUpdate)
+        {
+            var info = new Add2Info { Update = fnUpdate };
+            return InsertResult.Updated == Insert(key, ref info);
+        }
+
+        /// <summary>
+        /// Removes the element with the specified key from the <see cref="T:System.Collections.Generic.IDictionary`2"/>
+        /// if the fnCondition predicate is null or returns true.
+        /// </summary>
+        public bool TryRemove(TKey key, KeyValuePredicate<TKey, TValue> fnCondition)
+        {
+            var info = new DelInfo { Condition = fnCondition };
+            return Delete(key, ref info);
+        }
+
+        /// <summary>
+        /// Conditionally removes a key/value pair from the dictionary via an implementation of the
+        /// <see cref="T:CSharpTest.Net.Collections.IRemoveValue`2"/> interface.
+        /// </summary>
+        public bool TryRemove<T>(TKey key, ref T removeValue) where T : IRemoveValue<TKey, TValue>
+        {
+            return Delete(key, ref removeValue);
+        }
+
+        #endregion
+
+        #region ICollection<KeyValuePair<TKey,TValue>> Members
+
+        bool ICollection<KeyValuePair<TKey, TValue>>.IsReadOnly
+        {
+            get { return false; }
+        }
+
+        void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> item)
+        {
+            Add(item.Key, item.Value);
+        }
+
+        bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> item)
+        {
+            TValue test;
+            if (TryGetValue(item.Key, out test))
+                return EqualityComparer<TValue>.Default.Equals(item.Value, test);
+            return false;
+        }
+
+        void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
+        {
+            foreach (var item in this)
+                array[arrayIndex++] = item;
+        }
+
+        bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> item)
+        {
+            var del = new DelInfo(item.Value);
+            return Delete(item.Key, ref del);
+        }
+
+        #endregion
+
+        #region IEnumerator<KeyValuePair<TKey, TValue>>
+
+        private bool MoveNext(ref EnumState state)
+        {
+            if (_entries == null) throw new ObjectDisposedException(GetType().Name);
+
+            if (state.Current > 0)
+                state.Current = state.Next;
+
+            if (state.Current > 0)
+            {
+                state.Next = _entries[state.Current >> _shift][state.Current & _shiftMask].Link;
+                return true;
+            }
+
+            state.Unlock();
+            while (++state.Bucket < _hsize)
+            {
+                if (_buckets[state.Bucket] == 0)
+                    continue;
+
+                state.Lock(_locks[state.Bucket % _lsize]);
+
+                state.Current = _buckets[state.Bucket];
+                if (state.Current > 0)
+                {
+                    state.Next = _entries[state.Current >> _shift][state.Current & _shiftMask].Link;
+                    return true;
+                }
+
+                state.Unlock();
+            }
+
+            return false;
+        }
+
+        /// <summary>
+        /// Provides an enumerator that iterates through the collection.
+        /// </summary>
+        public struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>
+        {
+            private readonly LurchTable<TKey, TValue> _owner;
+            private EnumState _state;
+
+            internal Enumerator(LurchTable<TKey, TValue> owner)
+            {
+                _owner = owner;
+                _state = new EnumState();
+                _state.Init();
+            }
+
+            /// <summary>
+            /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
+            /// </summary>
+            public void Dispose()
+            {
+                _state.Unlock();
+            }
+
+            object IEnumerator.Current { get { return Current; } }
+
+            /// <summary>
+            /// Gets the element in the collection at the current position of the enumerator.
+            /// </summary>
+            public KeyValuePair<TKey, TValue> Current
+            {
+                get
+                {
+                    int index = _state.Current;
+                    if (index <= 0)
+                        throw new InvalidOperationException();
+                    if (_owner._entries == null)
+                        throw new ObjectDisposedException(GetType().Name);
+
+                    return new KeyValuePair<TKey, TValue>
+                        (
+                            _owner._entries[index >> _owner._shift][index & _owner._shiftMask].Key,
+                            _owner._entries[index >> _owner._shift][index & _owner._shiftMask].Value
+                        );
+                }
+            }
+
+            /// <summary>
+            /// Advances the enumerator to the next element of the collection.
+            /// </summary>
+            public bool MoveNext()
+            {
+                return _owner.MoveNext(ref _state);
+            }
+
+            /// <summary>
+            /// Sets the enumerator to its initial position, which is before the first element in the collection.
+            /// </summary>
+            public void Reset()
+            {
+                _state.Unlock();
+                _state.Init();
+            }
+        }
+
+        /// <summary>
+        /// Returns an enumerator that iterates through the collection.
+        /// </summary>
+        public Enumerator GetEnumerator() { return new Enumerator(this); }
+        IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
+        { return GetEnumerator(); }
+        IEnumerator IEnumerable.GetEnumerator()
+        { return GetEnumerator(); }
+        #endregion
+
+        #region KeyCollection
+        /// <summary>
+        /// Provides the collection of Keys for the LurchTable
+        /// </summary>
+        public class KeyCollection : ICollection<TKey>
+        {
+            private readonly LurchTable<TKey, TValue> _owner;
+
+            internal KeyCollection(LurchTable<TKey, TValue> owner)
+            {
+                _owner = owner;
+            }
+
+            #region ICollection<TKey> Members
+
+            /// <summary>
+            /// Determines whether the <see cref="T:System.Collections.Generic.ICollection`1"/> contains a specific value.
+            /// </summary>
+            public bool Contains(TKey item)
+            {
+                return _owner.ContainsKey(item);
+            }
+
+            /// <summary>
+            /// Copies the elements of the <see cref="T:System.Collections.Generic.ICollection`1"/> to an <see cref="T:System.Array"/>, starting at a particular <see cref="T:System.Array"/> index.
+            /// </summary>
+            public void CopyTo(TKey[] array, int arrayIndex)
+            {
+                foreach (var item in _owner)
+                    array[arrayIndex++] = item.Key;
+            }
+
+            /// <summary>
+            /// Gets the number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1"/>.
+            /// </summary>
+            public int Count
+            {
+                get { return _owner.Count; }
+            }
+
+            /// <summary>
+            /// Returns an enumerator that iterates through the collection.
+            /// </summary>
+            public Enumerator GetEnumerator()
+            {
+                return new Enumerator(_owner);
+            }
+
+            /// <summary>
+            /// Provides an enumerator that iterates through the collection.
+            /// </summary>
+            public struct Enumerator : IEnumerator<TKey>
+            {
+                private readonly LurchTable<TKey, TValue> _owner;
+                private EnumState _state;
+
+                internal Enumerator(LurchTable<TKey, TValue> owner)
+                {
+                    _owner = owner;
+                    _state = new EnumState();
+                    _state.Init();
+                }
+
+                /// <summary>
+                /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
+                /// </summary>
+                public void Dispose()
+                {
+                    _state.Unlock();
+                }
+
+                object IEnumerator.Current { get { return Current; } }
+
+                /// <summary>
+                /// Gets the element in the collection at the current position of the enumerator.
+                /// </summary>
+                public TKey Current
+                {
+                    get
+                    {
+                        int index = _state.Current;
+                        if (index <= 0)
+                            throw new InvalidOperationException();
+                        if (_owner._entries == null)
+                            throw new ObjectDisposedException(GetType().Name);
+                        return _owner._entries[index >> _owner._shift][index & _owner._shiftMask].Key;
+                    }
+                }
+
+                /// <summary>
+                /// Advances the enumerator to the next element of the collection.
+                /// </summary>
+                public bool MoveNext()
+                {
+                    return _owner.MoveNext(ref _state);
+                }
+
+                /// <summary>
+                /// Sets the enumerator to its initial position, which is before the first element in the collection.
+                /// </summary>
+                public void Reset()
+                {
+                    _state.Unlock();
+                    _state.Init();
+                }
+            }
+            [Obsolete]
+            IEnumerator<TKey> IEnumerable<TKey>.GetEnumerator()
+            {
+                return new Enumerator(_owner);
+            }
+            [Obsolete]
+            IEnumerator IEnumerable.GetEnumerator()
+            {
+                return new Enumerator(_owner);
+            }
+            [Obsolete]
+            bool ICollection<TKey>.IsReadOnly
+            {
+                get { return true; }
+            }
+            [Obsolete]
+            void ICollection<TKey>.Add(TKey item)
+            {
+                throw new NotSupportedException();
+            }
+            [Obsolete]
+            void ICollection<TKey>.Clear()
+            {
+                throw new NotSupportedException();
+            }
+            [Obsolete]
+            bool ICollection<TKey>.Remove(TKey item)
+            {
+                throw new NotSupportedException();
+            }
+
+            #endregion
+        }
+
+        private KeyCollection _keyCollection;
+        /// <summary>
+        /// Gets an <see cref="T:System.Collections.Generic.ICollection`1"/> containing the keys of the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </summary>
+        public KeyCollection Keys { get { return _keyCollection ?? (_keyCollection = new KeyCollection(this)); } }
+        [Obsolete]
+        ICollection<TKey> IDictionary<TKey, TValue>.Keys { get { return Keys; } }
+        #endregion
+
+        #region ValueCollection
+        /// <summary>
+        /// Provides the collection of Values for the LurchTable
+        /// </summary>
+        public class ValueCollection : ICollection<TValue>
+        {
+            private readonly LurchTable<TKey, TValue> _owner;
+
+            internal ValueCollection(LurchTable<TKey, TValue> owner)
+            {
+                _owner = owner;
+            }
+
+            #region ICollection<TValue> Members
+
+            /// <summary>
+            /// Determines whether the <see cref="T:System.Collections.Generic.ICollection`1"/> contains a specific value.
+            /// </summary>
+            public bool Contains(TValue value)
+            {
+                var comparer = EqualityComparer<TValue>.Default;
+                foreach (var item in _owner)
+                {
+                    if (comparer.Equals(item.Value, value))
+                        return true;
+                }
+                return false;
+            }
+
+            /// <summary>
+            /// Copies the elements of the <see cref="T:System.Collections.Generic.ICollection`1"/> to an <see cref="T:System.Array"/>, starting at a particular <see cref="T:System.Array"/> index.
+            /// </summary>
+            public void CopyTo(TValue[] array, int arrayIndex)
+            {
+                foreach (var item in _owner)
+                    array[arrayIndex++] = item.Value;
+            }
+
+            /// <summary>
+            /// Gets the number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1"/>.
+            /// </summary>
+            public int Count
+            {
+                get { return _owner.Count; }
+            }
+
+            /// <summary>
+            /// Returns an enumerator that iterates through the collection.
+            /// </summary>
+            public Enumerator GetEnumerator()
+            {
+                return new Enumerator(_owner);
+            }
+
+            /// <summary>
+            /// Provides an enumerator that iterates through the collection.
+            /// </summary>
+            public struct Enumerator : IEnumerator<TValue>
+            {
+                private readonly LurchTable<TKey, TValue> _owner;
+                private EnumState _state;
+
+                internal Enumerator(LurchTable<TKey, TValue> owner)
+                {
+                    _owner = owner;
+                    _state = new EnumState();
+                    _state.Init();
+                }
+
+                /// <summary>
+                /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
+                /// </summary>
+                public void Dispose()
+                {
+                    _state.Unlock();
+                }
+
+                object IEnumerator.Current { get { return Current; } }
+
+                /// <summary>
+                /// Gets the element in the collection at the current position of the enumerator.
+                /// </summary>
+                public TValue Current
+                {
+                    get
+                    {
+                        int index = _state.Current;
+                        if (index <= 0)
+                            throw new InvalidOperationException();
+                        if (_owner._entries == null)
+                            throw new ObjectDisposedException(GetType().Name);
+                        return _owner._entries[index >> _owner._shift][index & _owner._shiftMask].Value;
+                    }
+                }
+
+                /// <summary>
+                /// Advances the enumerator to the next element of the collection.
+                /// </summary>
+                public bool MoveNext()
+                {
+                    return _owner.MoveNext(ref _state);
+                }
+
+                /// <summary>
+                /// Sets the enumerator to its initial position, which is before the first element in the collection.
+                /// </summary>
+                public void Reset()
+                {
+                    _state.Unlock();
+                    _state.Init();
+                }
+            }
+            [Obsolete]
+            IEnumerator<TValue> IEnumerable<TValue>.GetEnumerator()
+            {
+                return new Enumerator(_owner);
+            }
+            [Obsolete]
+            IEnumerator IEnumerable.GetEnumerator()
+            {
+                return new Enumerator(_owner);
+            }
+            [Obsolete]
+            bool ICollection<TValue>.IsReadOnly
+            {
+                get { return true; }
+            }
+            [Obsolete]
+            void ICollection<TValue>.Add(TValue item)
+            {
+                throw new NotSupportedException();
+            }
+            [Obsolete]
+            void ICollection<TValue>.Clear()
+            {
+                throw new NotSupportedException();
+            }
+            [Obsolete]
+            bool ICollection<TValue>.Remove(TValue item)
+            {
+                throw new NotSupportedException();
+            }
+
+            #endregion
+        }
+
+        private ValueCollection _valueCollection;
+        /// <summary>
+        /// Gets an <see cref="T:System.Collections.Generic.ICollection`1"/> containing the values in the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
+        /// </summary>
+        public ValueCollection Values { get { return _valueCollection ?? (_valueCollection = new ValueCollection(this)); } }
+        [Obsolete]
+        ICollection<TValue> IDictionary<TKey, TValue>.Values { get { return Values; } }
+
+        #endregion
+
+        #region Peek/Dequeue
+
+        /// <summary>
+        /// Retrieves the oldest entry in the collection based on the ordering supplied to the constructor.
+        /// </summary>
+        /// <returns>True if the out parameter value was set.</returns>
+        /// <exception cref="System.InvalidOperationException">Raised if the table is unordered</exception>
+        public bool Peek(out KeyValuePair<TKey, TValue> value)
+        {
+            if (_ordering == LurchTableOrder.None)
+                throw new InvalidOperationException();
+            if (_entries == null)
+                throw new ObjectDisposedException(GetType().Name);
+
+            while (true)
+            {
+                int index = Interlocked.CompareExchange(ref _entries[0][0].Prev, 0, 0);
+                if (index == 0)
+                {
+                    value = default(KeyValuePair<TKey, TValue>);
+                    return false;
+                }
+
+                int hash = _entries[index >> _shift][index & _shiftMask].Hash;
+                if (hash >= 0)
+                {
+                    int bucket = hash % _hsize;
+                    lock (_locks[bucket % _lsize])
+                    {
+                        if (index == _entries[0][0].Prev &&
+                            hash == _entries[index >> _shift][index & _shiftMask].Hash)
+                        {
+                            value = new KeyValuePair<TKey, TValue>(
+                                _entries[index >> _shift][index & _shiftMask].Key,
+                                _entries[index >> _shift][index & _shiftMask].Value
+                            );
+                            return true;
+                        }
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Removes the oldest entry in the collection based on the ordering supplied to the constructor.
+        /// If an item is not available, a busy-wait loop is used to wait for an item.
+        /// </summary>
+        /// <returns>The Key/Value pair removed.</returns>
+        /// <exception cref="System.InvalidOperationException">Raised if the table is unordered</exception>
+        public KeyValuePair<TKey, TValue> Dequeue()
+        {
+            if (_ordering == LurchTableOrder.None)
+                throw new InvalidOperationException();
+            if (_entries == null)
+                throw new ObjectDisposedException(GetType().Name);
+
+            KeyValuePair<TKey, TValue> value;
+            while (!TryDequeue(out value))
+            {
+                while (0 == Interlocked.CompareExchange(ref _entries[0][0].Prev, 0, 0))
+                    Thread.Sleep(0);
+            }
+            return value;
+        }
+
+        /// <summary>
+        /// Removes the oldest entry in the collection based on the ordering supplied to the constructor.
+        /// </summary>
+        /// <returns>False if no item was available</returns>
+        /// <exception cref="System.InvalidOperationException">Raised if the table is unordered</exception>
+        public bool TryDequeue(out KeyValuePair<TKey, TValue> value)
+        {
+            return TryDequeue(null, out value);
+        }
+
+        /// <summary>
+        /// Removes the oldest entry in the collection based on the ordering supplied to the constructor.
+        /// </summary>
+        /// <returns>False if no item was available</returns>
+        /// <exception cref="System.InvalidOperationException">Raised if the table is unordered</exception>
+        public bool TryDequeue(Predicate<KeyValuePair<TKey, TValue>> predicate, out KeyValuePair<TKey, TValue> value)
+        {
+            if (_ordering == LurchTableOrder.None)
+                throw new InvalidOperationException();
+            if (_entries == null)
+                throw new ObjectDisposedException(GetType().Name);
+
+            while (true)
+            {
+                int index = Interlocked.CompareExchange(ref _entries[0][0].Prev, 0, 0);
+                if (index == 0)
+                {
+                    value = default(KeyValuePair<TKey, TValue>);
+                    return false;
+                }
+
+                int hash = _entries[index >> _shift][index & _shiftMask].Hash;
+                if (hash >= 0)
+                {
+                    int bucket = hash % _hsize;
+                    lock (_locks[bucket % _lsize])
+                    {
+                        if (index == _entries[0][0].Prev &&
+                            hash == _entries[index >> _shift][index & _shiftMask].Hash)
+                        {
+                            if (predicate != null)
+                            {
+                                var item = new KeyValuePair<TKey, TValue>(
+                                    _entries[index >> _shift][index & _shiftMask].Key,
+                                    _entries[index >> _shift][index & _shiftMask].Value
+                                );
+                                if (!predicate(item))
+                                {
+                                    value = item;
+                                    return false;
+                                }
+                            }
+
+                            int next = _entries[index >> _shift][index & _shiftMask].Link;
+                            bool removed = false;
+
+                            if (_buckets[bucket] == index)
+                            {
+                                _buckets[bucket] = next;
+                                removed = true;
+                            }
+                            else
+                            {
+                                int test = _buckets[bucket];
+                                while (test != 0)
+                                {
+                                    int cmp = _entries[test >> _shift][test & _shiftMask].Link;
+                                    if (cmp == index)
+                                    {
+                                        _entries[test >> _shift][test & _shiftMask].Link = next;
+                                        removed = true;
+                                        break;
+                                    }
+                                    test = cmp;
+                                }
+                            }
+                            if (!removed)
+                                throw new LurchTableCorruptionException();
+
+                            value = new KeyValuePair<TKey, TValue>(
+                                _entries[index >> _shift][index & _shiftMask].Key,
+                                _entries[index >> _shift][index & _shiftMask].Value
+                            );
+                            Interlocked.Decrement(ref _count);
+                            if (_ordering != LurchTableOrder.None)
+                                InternalUnlink(index);
+                            FreeSlot(ref index, Interlocked.Increment(ref _freeVersion));
+
+                            var handler = ItemRemoved;
+                            if (handler != null)
+                                handler(value);
+
+                            return true;
+                        }
+                    }
+                }
+            }
+        }
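+
+        // Drain sketch (illustrative; 'table' is hypothetical): on an ordered table, TryDequeue
+        // repeatedly removes the oldest entry until the collection is empty.
+        //
+        //     KeyValuePair<int, string> oldest;
+        //     while (table.TryDequeue(out oldest))
+        //     {
+        //         // process oldest.Key / oldest.Value here
+        //     }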
+
+        #endregion
+
+        #region Internal Implementation
+
+        enum InsertResult { Inserted = 1, Updated = 2, Exists = 3, NotFound = 4 }
+
+        bool InternalGetValue(int hash, TKey key, out TValue value)
+        {
+            if (_entries == null)
+                throw new ObjectDisposedException(GetType().Name);
+
+            int bucket = hash % _hsize;
+            lock (_locks[bucket % _lsize])
+            {
+                int index = _buckets[bucket];
+                while (index != 0)
+                {
+                    if (hash == _entries[index >> _shift][index & _shiftMask].Hash &&
+                        _comparer.Equals(key, _entries[index >> _shift][index & _shiftMask].Key))
+                    {
+                        value = _entries[index >> _shift][index & _shiftMask].Value;
+                        if (hash == _entries[index >> _shift][index & _shiftMask].Hash)
+                        {
+                            if (_ordering == LurchTableOrder.Access)
+                            {
+                                InternalUnlink(index);
+                                InternalLink(index);
+                            }
+                            return true;
+                        }
+                    }
+                    index = _entries[index >> _shift][index & _shiftMask].Link;
+                }
+
+                value = default(TValue);
+                return false;
+            }
+        }
+
+        InsertResult Insert<T>(TKey key, ref T value) where T : ICreateOrUpdateValue<TKey, TValue>
+        {
+            if (_entries == null)
+                throw new ObjectDisposedException(GetType().Name);
+
+            int hash = _comparer.GetHashCode(key) & int.MaxValue;
+            int added;
+
+            InsertResult result = InternalInsert(hash, key, out added, ref value);
+
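+            // Hard-limit enforcement: when this insert pushed the count past _limit on an
+            // ordered table, pop the oldest entry so the collection never grows unbounded.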
+            if (added > _limit && _ordering != LurchTableOrder.None)
+            {
+                KeyValuePair<TKey, TValue> ignore;
+                TryDequeue(out ignore);
+            }
+            return result;
+        }
+
+        InsertResult InternalInsert<T>(int hash, TKey key, out int added, ref T value) where T : ICreateOrUpdateValue<TKey, TValue>
+        {
+            int bucket = hash % _hsize;
+            lock (_locks[bucket % _lsize])
+            {
+                TValue temp;
+                int index = _buckets[bucket];
+                while (index != 0)
+                {
+                    if (hash == _entries[index >> _shift][index & _shiftMask].Hash &&
+                        _comparer.Equals(key, _entries[index >> _shift][index & _shiftMask].Key))
+                    {
+                        temp = _entries[index >> _shift][index & _shiftMask].Value;
+                        var original = temp;
+                        if (value.UpdateValue(key, ref temp))
+                        {
+                            _entries[index >> _shift][index & _shiftMask].Value = temp;
+
+                            if (_ordering == LurchTableOrder.Modified || _ordering == LurchTableOrder.Access)
+                            {
+                                InternalUnlink(index);
+                                InternalLink(index);
+                            }
+
+                            var handler = ItemUpdated;
+                            if (handler != null)
+                                handler(new KeyValuePair<TKey, TValue>(key, original), new KeyValuePair<TKey, TValue>(key, temp));
+
+                            added = -1;
+                            return InsertResult.Updated;
+                        }
+
+                        added = -1;
+                        return InsertResult.Exists;
+                    }
+                    index = _entries[index >> _shift][index & _shiftMask].Link;
+                }
+                if (value.CreateValue(key, out temp))
+                {
+#pragma warning disable 612,618
+                    index = AllocSlot();
+#pragma warning restore 612,618
+                    _entries[index >> _shift][index & _shiftMask].Hash = hash;
+                    _entries[index >> _shift][index & _shiftMask].Key = key;
+                    _entries[index >> _shift][index & _shiftMask].Value = temp;
+                    _entries[index >> _shift][index & _shiftMask].Link = _buckets[bucket];
+                    _buckets[bucket] = index;
+
+                    added = Interlocked.Increment(ref _count);
+                    if (_ordering != LurchTableOrder.None)
+                        InternalLink(index);
+
+                    var handler = ItemAdded;
+                    if (handler != null)
+                        handler(new KeyValuePair<TKey, TValue>(key, temp));
+
+                    return InsertResult.Inserted;
+                }
+            }
+
+            added = -1;
+            return InsertResult.NotFound;
+        }
+
+        bool Delete<T>(TKey key, ref T value) where T : IRemoveValue<TKey, TValue>
+        {
+            if (_entries == null)
+                throw new ObjectDisposedException(GetType().Name);
+
+            int hash = _comparer.GetHashCode(key) & int.MaxValue;
+            int bucket = hash % _hsize;
+            lock (_locks[bucket % _lsize])
+            {
+                int prev = 0;
+                int index = _buckets[bucket];
+                while (index != 0)
+                {
+                    if (hash == _entries[index >> _shift][index & _shiftMask].Hash &&
+                        _comparer.Equals(key, _entries[index >> _shift][index & _shiftMask].Key))
+                    {
+                        TValue temp = _entries[index >> _shift][index & _shiftMask].Value;
+
+                        if (value.RemoveValue(key, temp))
+                        {
+                            int next = _entries[index >> _shift][index & _shiftMask].Link;
+                            if (prev == 0)
+                                _buckets[bucket] = next;
+                            else
+                                _entries[prev >> _shift][prev & _shiftMask].Link = next;
+
+                            Interlocked.Decrement(ref _count);
+                            if (_ordering != LurchTableOrder.None)
+                                InternalUnlink(index);
+                            FreeSlot(ref index, Interlocked.Increment(ref _freeVersion));
+
+                            var handler = ItemRemoved;
+                            if (handler != null)
+                                handler(new KeyValuePair<TKey, TValue>(key, temp));
+
+                            return true;
+                        }
+                        return false;
+                    }
+
+                    prev = index;
+                    index = _entries[index >> _shift][index & _shiftMask].Link;
+                }
+            }
+            return false;
+        }
+
+        void InternalLink(int index)
+        {
+            Interlocked.Exchange(ref _entries[index >> _shift][index & _shiftMask].Prev, 0);
+            Interlocked.Exchange(ref _entries[index >> _shift][index & _shiftMask].Next, ~0);
+            int next = Interlocked.Exchange(ref _entries[0][0].Next, index);
+            if (next < 0)
+                throw new LurchTableCorruptionException();
+
+            while (0 != Interlocked.CompareExchange(ref _entries[next >> _shift][next & _shiftMask].Prev, index, 0))
+            { }
+
+            Interlocked.Exchange(ref _entries[index >> _shift][index & _shiftMask].Next, next);
+        }
+
+        void InternalUnlink(int index)
+        {
+            while (true)
+            {
+                int cmp;
+                int prev = _entries[index >> _shift][index & _shiftMask].Prev;
+                while (prev >= 0 && prev != (cmp = Interlocked.CompareExchange(
+                            ref _entries[index >> _shift][index & _shiftMask].Prev, ~prev, prev)))
+                    prev = cmp;
+                if (prev < 0)
+                    throw new LurchTableCorruptionException();
+
+                int next = _entries[index >> _shift][index & _shiftMask].Next;
+                while (next >= 0 && next != (cmp = Interlocked.CompareExchange(
+                            ref _entries[index >> _shift][index & _shiftMask].Next, ~next, next)))
+                    next = cmp;
+                if (next < 0)
+                    throw new LurchTableCorruptionException();
+
+                if ((Interlocked.CompareExchange(
+                        ref _entries[prev >> _shift][prev & _shiftMask].Next, next, index) == index))
+                {
+                    while (Interlocked.CompareExchange(
+                               ref _entries[next >> _shift][next & _shiftMask].Prev, prev, index) != index)
+                    { }
+                    return;
+                }
+
+                //cancel the delete markers and retry
+                if (~next != Interlocked.CompareExchange(
+                        ref _entries[index >> _shift][index & _shiftMask].Next, next, ~next))
+                    throw new LurchTableCorruptionException();
+                if (~prev != Interlocked.CompareExchange(
+                        ref _entries[index >> _shift][index & _shiftMask].Prev, prev, ~prev))
+                    throw new LurchTableCorruptionException();
+            }
+        }
+
+        [Obsolete("Release build inlining, so we need to ignore for testing.")]
+        int AllocSlot()
+        {
+            while (true)
+            {
+                int allocated = _entries.Length * _allocSize;
+                var previous = _entries;
+
+                while (_count + OverAlloc < allocated || _used < allocated)
+                {
+                    int next;
+                    if (_count + FreeSlots < _used)
+                    {
+                        int freeSlotIndex = Interlocked.Increment(ref _allocNext);
+                        int slot = (freeSlotIndex & int.MaxValue) % FreeSlots;
+                        next = Interlocked.Exchange(ref _free[slot].Head, 0);
+                        if (next != 0)
+                        {
+                            int nextFree = _entries[next >> _shift][next & _shiftMask].Link;
+                            if (nextFree == 0)
+                            {
+                                Interlocked.Exchange(ref _free[slot].Head, next);
+                            }
+                            else
+                            {
+                                Interlocked.Exchange(ref _free[slot].Head, nextFree);
+                                return next;
+                            }
+                        }
+                    }
+
+                    next = _used;
+                    if (next < allocated)
+                    {
+                        int alloc = Interlocked.CompareExchange(ref _used, next + 1, next);
+                        if (alloc == next)
+                        {
+                            return next;
+                        }
+                    }
+                }
+
+                lock (this)
+                {
+                    //time to grow...
+                    if (ReferenceEquals(_entries, previous))
+                    {
+                        Entry[][] arentries = new Entry[_entries.Length + 1][];
+                        _entries.CopyTo(arentries, 0);
+                        arentries[arentries.Length - 1] = new Entry[_allocSize];
+
+                        Interlocked.CompareExchange(ref _entries, arentries, previous);
+                    }
+                }
+            }
+        }
+
+        void FreeSlot(ref int index, int ver)
+        {
+            _entries[index >> _shift][index & _shiftMask].Key = default(TKey);
+            _entries[index >> _shift][index & _shiftMask].Value = default(TValue);
+            Interlocked.Exchange(ref _entries[index >> _shift][index & _shiftMask].Link, 0);
+
+            int slot = (ver & int.MaxValue) % FreeSlots;
+            int prev = Interlocked.Exchange(ref _free[slot].Tail, index);
+
+            if (prev <= 0 || 0 != Interlocked.CompareExchange(ref _entries[prev >> _shift][prev & _shiftMask].Link, index, 0))
+            {
+                throw new LurchTableCorruptionException();
+            }
+        }
+
+        #endregion
+
+        #region Internal Structures
+
+        struct FreeList
+        {
+            public int Head;
+            public int Tail;
+        }
+
+        struct Entry
+        {
+            public int Prev, Next; // insertion/access sequence ordering
+            public int Link;
+            public int Hash; // hash value of entry's Key
+            public TKey Key; // key of entry
+            public TValue Value; // value of entry
+        }
+
+        struct EnumState
+        {
+            private object _locked;
+            public int Bucket, Current, Next;
+            public void Init()
+            {
+                Bucket = -1;
+                Current = 0;
+                Next = 0;
+                _locked = null;
+            }
+
+            public void Unlock()
+            {
+                if (_locked != null)
+                {
+                    Monitor.Exit(_locked);
+                    _locked = null;
+                }
+            }
+
+            public void Lock(object lck)
+            {
+                if (_locked != null)
+                    Monitor.Exit(_locked);
+                Monitor.Enter(_locked = lck);
+            }
+        }
+
+        struct DelInfo : IRemoveValue<TKey, TValue>
+        {
+            public TValue Value;
+            readonly bool _hasTestValue;
+            readonly TValue _testValue;
+            public KeyValuePredicate<TKey, TValue> Condition;
+
+            public DelInfo(TValue expected)
+            {
+                Value = default(TValue);
+                _testValue = expected;
+                _hasTestValue = true;
+                Condition = null;
+            }
+
+            public bool RemoveValue(TKey key, TValue value)
+            {
+                Value = value;
+
+                if (_hasTestValue && !EqualityComparer<TValue>.Default.Equals(_testValue, value))
+                    return false;
+                if (Condition != null && !Condition(key, value))
+                    return false;
+
+                return true;
+            }
+        }
+
+        struct AddInfo : ICreateOrUpdateValue<TKey, TValue>
+        {
+            public bool CanUpdate;
+            public TValue Value;
+            public bool CreateValue(TKey key, out TValue value)
+            {
+                value = Value;
+                return true;
+            }
+
+            public bool UpdateValue(TKey key, ref TValue value)
+            {
+                if (!CanUpdate)
+                {
+                    Value = value;
+                    return false;
+                }
+
+                value = Value;
+                return true;
+            }
+        }
+
+        struct Add2Info : ICreateOrUpdateValue<TKey, TValue>
+        {
+            readonly bool _hasAddValue;
+            readonly TValue _addValue;
+            public TValue Value;
+            public Converter<TKey, TValue> Create;
+            public KeyValueUpdate<TKey, TValue> Update;
+
+            public Add2Info(TValue addValue) : this()
+            {
+                _hasAddValue = true;
+                _addValue = addValue;
+            }
+
+            public bool CreateValue(TKey key, out TValue value)
+            {
+                if (_hasAddValue)
+                {
+                    value = Value = _addValue;
+                    return true;
+                }
+                if (Create != null)
+                {
+                    value = Value = Create(key);
+                    return true;
+                }
+                value = Value = default(TValue);
+                return false;
+            }
+
+            public bool UpdateValue(TKey key, ref TValue value)
+            {
+                if (Update == null)
+                {
+                    Value = value;
+                    return false;
+                }
+
+                value = Value = Update(key, value);
+                return true;
+            }
+        }
+
+        struct UpdateInfo : ICreateOrUpdateValue<TKey, TValue>
+        {
+            public TValue Value;
+            readonly bool _hasTestValue;
+            readonly TValue _testValue;
+
+            public UpdateInfo(TValue expected)
+            {
+                Value = default(TValue);
+                _testValue = expected;
+                _hasTestValue = true;
+            }
+
+            bool ICreateValue<TKey, TValue>.CreateValue(TKey key, out TValue value)
+            {
+                value = default(TValue);
+                return false;
+            }
+            public bool UpdateValue(TKey key, ref TValue value)
+            {
+                if (_hasTestValue && !EqualityComparer<TValue>.Default.Equals(_testValue, value))
+                    return false;
+
+                value = Value;
+                return true;
+            }
+        }
+        #endregion
+    }
+
+    #region LurchTable Support
+
+    #region Exceptions
+
+    /// <summary>
+    /// Exception class: LurchTableCorruptionException
+    /// The LurchTable internal datastructure appears to be corrupted.
+    /// </summary>
+    [System.SerializableAttribute()]
+    [global::System.Diagnostics.DebuggerStepThroughAttribute()]
+    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("CSharpTest.Net.Generators", "2.13.222.435")]
+    public partial class LurchTableCorruptionException : System.ApplicationException
+    {
+        /// <summary>
+        /// Serialization constructor
+        /// </summary>
+        protected LurchTableCorruptionException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) : base(info, context)
+        {
+        }
+        /// <summary>
+        /// Used to create this exception from an HRESULT and message, bypassing the message formatting
+        /// </summary>
+        internal static System.Exception Create(int hResult, string message)
+        {
+            return new LurchTableCorruptionException((System.Exception)null, hResult, message);
+        }
+        /// <summary>
+        /// Constructs the exception from an HRESULT and message, bypassing the message formatting
+        /// </summary>
+        protected LurchTableCorruptionException(System.Exception innerException, int hResult, string message) : base(message, innerException)
+        {
+            base.HResult = hResult;
+        }
+        /// <summary>
+        /// The LurchTable internal datastructure appears to be corrupted.
+        /// </summary>
+        public LurchTableCorruptionException()
+            : this((System.Exception)null, -1, "The LurchTable internal datastructure appears to be corrupted.")
+        {
+        }
+        /// <summary>
+        /// The LurchTable internal datastructure appears to be corrupted.
+        /// </summary>
+        public LurchTableCorruptionException(System.Exception innerException)
+            : this(innerException, -1, "The LurchTable internal datastructure appears to be corrupted.")
+        {
+        }
+        /// <summary>
+        /// If the condition is false, throws: The LurchTable internal datastructure appears to be corrupted.
+        /// </summary>
+        public static void Assert(bool condition)
+        {
+            if (!condition) throw new LurchTableCorruptionException();
+        }
+    }
+
+    #endregion // Exceptions
+
+    #region Delegates
+
+    /// <summary> Provides a delegate that performs an atomic update of a key/value pair </summary>
+    public delegate TValue KeyValueUpdate<TKey, TValue>(TKey key, TValue original);
+
+    /// <summary> Provides a delegate that performs a test on key/value pair </summary>
+    public delegate bool KeyValuePredicate<TKey, TValue>(TKey key, TValue original);
+
+    #endregion // Delegates
+
+    #region Interfaces
+
+    /// <summary>
+    /// An interface to provide conditional or custom creation logic to a concurrent dictionary.
+    /// </summary>
+    public interface ICreateValue<TKey, TValue>
+    {
+        /// <summary>
+        /// Called when the key was not found within the dictionary to produce a new value that can be added.
+        /// Return true to continue with the insertion, or false to prevent the key/value from being inserted.
+        /// </summary>
+        bool CreateValue(TKey key, out TValue value);
+    }
+    /// <summary>
+    /// An interface to provide conditional or custom update logic to a concurrent dictionary.
+    /// </summary>
+    public interface IUpdateValue<TKey, TValue>
+    {
+        /// <summary>
+        /// Called when the key was found within the dictionary to produce a modified value to update the item
+        /// to. Return true to continue with the update, or false to prevent the key/value from being updated.
+        /// </summary>
+        bool UpdateValue(TKey key, ref TValue value);
+    }
+    /// <summary>
+    /// An interface to provide conditional or custom creation or update logic to a concurrent dictionary.
+    /// </summary>
+    /// <remarks>
+    /// Generally implemented as a struct and passed by ref to save stack space and to retrieve the values
+    /// that were inserted or updated.
+    /// </remarks>
+    public interface ICreateOrUpdateValue<TKey, TValue> : ICreateValue<TKey, TValue>, IUpdateValue<TKey, TValue>
+    {
+    }
+
+    /// <summary>
+    /// An interface to provide conditional removal of an item from a concurrent dictionary.
+    /// </summary>
+    /// <remarks>
+    /// Generally implemented as a struct and passed by ref to save stack space and to retrieve the value
+    /// that was removed.
+    /// </remarks>
+    public interface IRemoveValue<TKey, TValue>
+    {
+        /// <summary>
+        /// Called when the dictionary is about to remove the key/value pair provided. Return true to allow
+        /// its removal, or false to prevent it from being removed.
+        /// </summary>
+        bool RemoveValue(TKey key, TValue value);
+    }
+
+    #endregion // interfaces
+
+    #region Classes
+
+    internal class HashUtilities
+    {
+        private static readonly int[] PrimeNumbers =
+            new[]
+                {
+                    17, 37, 67, 131, 257, 521, // ROK - Added smaller primes
+                    1103, 1327, 1597, 1931, 2333, 2801, 3371, 4049, 4861, 5839, 7013, 8419, 10103, 12143, 14591,
+                    17519, 21023, 25229, 30293, 36353, 43627, 52361, 62851, 75431, 90523, 108631, 130363, 156437,
+                    187751, 225307, 270371, 324449, 389357, 467237, 560689, 672827, 807403, 968897, 1162687,
+                    1395263, 1674319, 2009191, 2411033, 2893249, 3471899, 4166287, 4999559, 5999471, 7199369
+                };
+
+        internal static int SelectPrimeNumber(int hashSize)
+        {
+            int offset = Array.BinarySearch(PrimeNumbers, hashSize);
+            if (offset < 0)
+                offset = ~offset;
+            return PrimeNumbers[Math.Min(offset, PrimeNumbers.Length - 1)];
+        }
+    }
+
+    #endregion // Classes
+
+    #endregion // LurchTable Support
+}
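A minimal sketch, not part of the commit above, of how the ICreateOrUpdateValue contract defined in this file is typically implemented: a value-type visitor that seeds a counter on a miss and increments it on a hit, in the same spirit as the AddInfo and Add2Info structs. Only the interface side is shown; the LurchTable method that consumes such a visitor lies outside this hunk, so its exact signature is not assumed here.

    internal struct CountingVisitor : ICreateOrUpdateValue<string, int>
    {
        // Key absent: seed the counter at 1 and allow the insertion.
        public bool CreateValue(string key, out int value)
        {
            value = 1;
            return true;
        }

        // Key present: bump the existing counter and allow the update.
        public bool UpdateValue(string key, ref int value)
        {
            value++;
            return true;
        }
    }

Returning false from either member leaves the table untouched, which is how the DelInfo and UpdateInfo structs above implement their conditional (expected-value) semantics.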

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index 0949614..1aeac45 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -287,26 +287,26 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
 
             // First try to find the answer in the LRU cache:
-            lock (ordinalCache)
+
+            // LUCENENET: Lock was removed here because the underlying cache is thread-safe,
+            // and removing the lock seems to make the performance better.
+            IntClass res = ordinalCache.Get(cp);
+            if (res != null && res.IntItem != null)
             {
-                IntClass res = ordinalCache.Get(cp);
-                if (res != null && res.IntItem != null)
+                if ((int)res.IntItem.Value < indexReader.MaxDoc)
                 {
-                    if ((int)res.IntItem.Value < indexReader.MaxDoc)
-                    {
-                        // Since the cache is shared with DTR instances allocated from
-                        // doOpenIfChanged, we need to ensure that the ordinal is one that
-                        // this DTR instance recognizes.
-                        return (int)res.IntItem.Value;
-                    }
-                    else
-                    {
-                        // if we get here, it means that the category was found in the cache,
-                        // but is not recognized by this TR instance. Therefore there's no
-                        // need to continue search for the path on disk, because we won't find
-                        // it there too.
-                        return TaxonomyReader.INVALID_ORDINAL;
-                    }
+                    // Since the cache is shared with DTR instances allocated from
+                    // doOpenIfChanged, we need to ensure that the ordinal is one that
+                    // this DTR instance recognizes.
+                    return (int)res.IntItem.Value;
+                }
+                else
+                {
+                    // if we get here, it means that the category was found in the cache,
+                    // but is not recognized by this TR instance. Therefore there's no
+                    // need to continue search for the path on disk, because we won't find
+                    // it there too.
+                    return TaxonomyReader.INVALID_ORDINAL;
                 }
             }
 
@@ -323,10 +323,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 // that are allocated from doOpenIfChanged. Therefore, if we only store
                 // information about found categories, we cannot accidently tell a new
                 // generation of DTR that a category does not exist.
-                lock (ordinalCache)
-                {
-                    ordinalCache.Put(cp, new IntClass { IntItem = Convert.ToInt32(ret) });
-                }
+
+                // LUCENENET: Lock was removed here because the underlying cache is thread-safe,
+                // and removing the lock seems to make the performance better.
+                ordinalCache.Put(cp, new IntClass { IntItem = Convert.ToInt32(ret) });
             }
 
             return ret;
@@ -348,10 +348,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             // TODO: can we use an int-based hash impl, such as IntToObjectMap,
             // wrapped as LRU?
 
-            // LUCENENET NOTE: We don't need to convert int to int here.
-            // Also, our cache implementation is thread safe, so we can nix the
-            // locks.
+            // LUCENENET NOTE: Unlike the Java implementation, we don't need to box the ordinal int into an Integer object here.
             FacetLabel res;
+            // LUCENENET: Lock was removed here because the underlying cache is thread-safe,
+            // and removing the lock seems to make the performance better.
             if (categoryCache.TryGetValue(ordinal, out res))
             {
                 return res;
@@ -359,6 +359,8 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
             Document doc = indexReader.Document(ordinal);
             res = new FacetLabel(FacetsConfig.StringToPath(doc.Get(Consts.FULL)));
+            // LUCENENET: Lock was removed here because the underlying cache is thread-safe,
+            // and removing the lock seems to make the performance better.
             categoryCache.Put(ordinal, res);
 
             return res;
@@ -386,10 +388,14 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             set
             {
                 EnsureOpen();
-                // LUCENENET NOTE: No locking required here,
-                // since our LRU implementation is thread-safe
-                categoryCache.Capacity = value;
-                ordinalCache.Capacity = value;
+                lock (categoryCache)
+                {
+                    categoryCache.Limit = value;
+                }
+                lock (ordinalCache)
+                {
+                    ordinalCache.Limit = value;
+                }
             }
         }
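The hunks above converge on the same read-through pattern once the per-cache locks are dropped: probe the thread-safe cache, fall back to the index on a miss, then publish the result. A minimal sketch of that pattern follows; LoadLabelFromIndex is a hypothetical stand-in for the indexReader.Document(...) + FacetsConfig.StringToPath(...) lookup shown in the diff.

    private FacetLabel GetCachedLabel(LRUHashMap<int, FacetLabel> categoryCache, int ordinal)
    {
        FacetLabel label;
        if (categoryCache.TryGetValue(ordinal, out label))
        {
            return label;                    // hit: no external lock, the cache itself is thread-safe
        }
        label = LoadLabelFromIndex(ordinal); // miss: hypothetical index lookup
        categoryCache.Put(ordinal, label);   // a concurrent Put of the same ordinal is a benign race
        return label;
    }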
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index 5ff77fb..956922f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -1,6 +1,7 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
+using System.Collections;
 using System.Collections.Generic;
-using System.Linq;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
@@ -26,8 +27,8 @@ namespace Lucene.Net.Facet.Taxonomy
     /// When it reaches that <see cref="Capacity"/>, each time a new element is added, the least
     /// recently used (LRU) entry is removed.
     /// <para>
-    /// Unlike the Java Lucene implementation, this one is thread safe. Do note
-    /// that every time an element is read from <see cref="LRUHashMap{TKey, TValue}"/>,
+    /// Unlike the Java Lucene implementation, this one is thread safe because it is backed by the <see cref="LurchTable{TKey, TValue}"/>.
+    /// Do note that every time an element is read from <see cref="LRUHashMap{TKey, TValue}"/>,
     /// a write operation also takes place to update the element's last access time.
     /// This is because the LRU order needs to be remembered to determine which element
     /// to evict when the <see cref="Capacity"/> is exceeded. 
@@ -37,157 +38,162 @@ namespace Lucene.Net.Facet.Taxonomy
     /// @lucene.experimental
     /// </para>
     /// </summary>
-    public class LRUHashMap<TKey, TValue> where TValue : class //this is implementation of LRU Cache
+    public class LRUHashMap<TKey, TValue> : IDictionary<TKey, TValue>
     {
-        private readonly Dictionary<TKey, CacheDataObject> cache;
-        // We can't use a ReaderWriterLockSlim because every read is also a 
-        // write, so we gain nothing by doing so
-        private readonly object syncLock = new object();
-        // Record last access so we can tie break if 2 calls make it in within
-        // the same millisecond.
-        private long lastAccess;
-        private int capacity;
+        private LurchTable<TKey, TValue> cache;
 
         public LRUHashMap(int capacity)
         {
-            if (capacity < 1)
-            {
-                throw new ArgumentOutOfRangeException("capacity must be at least 1");
-            }
-            this.capacity = capacity;
-            this.cache = new Dictionary<TKey, CacheDataObject>(capacity);
+            cache = new LurchTable<TKey, TValue>(LurchTableOrder.Access, capacity);
         }
 
         /// <summary>
         /// allows changing the map's maximal number of elements
         /// which was defined at construction time.
         /// <para>
-        /// Note that if the map is already larger than maxSize, the current 
+        /// Note that if the map is already larger than <see cref="Limit"/>, the current 
         /// implementation does not shrink it (by removing the oldest elements);
         /// Rather, the map remains in its current size as new elements are
         /// added, and will only start shrinking (until settling again on the
-        /// given <see cref="Capacity"/>) if existing elements are explicitly deleted.
+        /// given <see cref="Limit"/>) if existing elements are explicitly deleted.
         /// </para>
         /// </summary>
-        public virtual int Capacity
+        public virtual int Limit
         {
-            get { return capacity; }
+            get
+            {
+                return cache.Limit;
+            }
             set
             {
                 if (value < 1)
                 {
-                    throw new ArgumentOutOfRangeException("Capacity must be at least 1");
+                    throw new ArgumentOutOfRangeException("Limit must be at least 1");
                 }
-                capacity = value;
+                cache.Limit = value;
             }
         }
 
-        public bool Put(TKey key, TValue value)
+        public TValue Put(TKey key, TValue value)
         {
-            lock (syncLock)
-            { 
-                CacheDataObject cdo;
-                if (cache.TryGetValue(key, out cdo))
-                {
-                    // Item already exists, update our last access time
-                    cdo.timestamp = GetTimestamp();
-                }
-                else
-                {
-                    cache[key] = new CacheDataObject
-                    {
-                        value = value,
-                        timestamp = GetTimestamp()
-                    };
-                    // We have added a new item, so we may need to remove the eldest
-                    if (cache.Count > Capacity)
-                    {
-                        // Remove the eldest item (lowest timestamp) from the cache
-                        cache.Remove(cache.OrderBy(x => x.Value.timestamp).First().Key);
-                    }
-                }
-            }
-            return true;
+            TValue oldValue = default(TValue);
+            cache.AddOrUpdate(key, value, (k, v) =>
+            {
+                oldValue = cache[key];
+                return value;
+            });
+            return oldValue;
         }
 
         public TValue Get(TKey key)
         {
-            lock (syncLock)
+            TValue result;
+            if (!cache.TryGetValue(key, out result))
             {
-                CacheDataObject cdo;
-                if (cache.TryGetValue(key, out cdo))
-                {
-                    // Write our last access time
-                    cdo.timestamp = GetTimestamp();
-
-                    return cdo.value;
-                }
+                return default(TValue);
             }
-            return null;
+            return result;
         }
 
-        public bool TryGetValue(TKey key, out TValue value)
+        #region IDictionary<TKey, TValue> members
+
+        public TValue this[TKey key]
         {
-            lock (syncLock)
+            get
             {
-                CacheDataObject cdo;
-                if (cache.TryGetValue(key, out cdo))
-                {
-                    // Write our last access time
-                    cdo.timestamp = GetTimestamp();
-                    value = cdo.value;
-
-                    return true;
-                }
-
-                value = null;
-                return false;
+                return cache[key];
+            }
+            set
+            {
+                cache[key] = value;
             }
         }
 
-        public bool ContainsKey(TKey key)
+        public int Count
         {
-            return cache.ContainsKey(key);
+            get
+            {
+                return cache.Count;
+            }
         }
 
-        public int Count
+        public bool IsReadOnly
         {
             get
             {
-                return cache.Count;
+                return false;
             }
         }
 
-        private long GetTimestamp()
+        public ICollection<TKey> Keys
         {
-            long ticks = DateTime.UtcNow.Ticks;
-            if (ticks <= lastAccess)
+            get
             {
-                // Tie break by incrementing
-                // when 2 calls happen within the
-                // same millisecond
-                ticks = ++lastAccess;
+                return cache.Keys;
             }
-            else
+        }
+
+        public ICollection<TValue> Values
+        {
+            get
             {
-                lastAccess = ticks;
+                return cache.Values;
             }
-            return ticks;
         }
-        
 
-        #region Nested type: CacheDataObject
+        public void Add(KeyValuePair<TKey, TValue> item)
+        {
+            throw new NotSupportedException();
+        }
 
-        private class CacheDataObject
+        public void Add(TKey key, TValue value)
         {
-            // Ticks representing the last access time
-            public long timestamp;
-            public TValue value;
+            cache.Add(key, value);
+        }
 
-            public override string ToString()
-            {
-                return "Last Access: " + timestamp.ToString() + " - " + value.ToString();
-            }
+        public void Clear()
+        {
+            cache.Clear();
+        }
+
+        public bool Contains(KeyValuePair<TKey, TValue> item)
+        {
+            throw new NotSupportedException();
+        }
+
+        public bool ContainsKey(TKey key)
+        {
+            return cache.ContainsKey(key);
+        }
+
+        public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
+        {
+            throw new NotSupportedException();
+        }
+
+        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
+        {
+            return cache.GetEnumerator();
+        }
+
+        public bool Remove(KeyValuePair<TKey, TValue> item)
+        {
+            throw new NotSupportedException();
+        }
+
+        public bool Remove(TKey key)
+        {
+            return cache.Remove(key);
+        }
+
+        public bool TryGetValue(TKey key, out TValue value)
+        {
+            return cache.TryGetValue(key, out value);
+        }
+
+        IEnumerator IEnumerable.GetEnumerator()
+        {
+            return cache.GetEnumerator();
         }
 
         #endregion
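A short usage sketch, not part of the commit, against the public surface of the rewritten LRUHashMap above; the eviction shown is the expected access-ordered behavior described in the class remarks (reads count as accesses, and adding past the limit drops the least recently used entry).

    var cache = new LRUHashMap<string, string>(2);        // LurchTable-backed, access-ordered, limit of 2
    cache.Put("a", "alpha");
    cache.Put("b", "beta");
    cache.Get("a");                                        // reading "a" refreshes its last-access order
    cache.Put("c", "gamma");                               // expected to evict the least recently used entry, "b"
    string value;
    bool stillCached = cache.TryGetValue("b", out value);  // expected: false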

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/36cde063/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index abad1ea..d7d5676 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -116,13 +116,13 @@
         {
             lock (this)
             {
-                int? res = cache.Get(categoryPath);
-                if (res == null)
+                int result;
+                if (!cache.TryGetValue(categoryPath, out result))
                 {
                     return -1;
                 }
 
-                return (int)res;
+                return result;
             }
         }
 
@@ -130,7 +130,7 @@
         {
             lock (this)
             {
-                bool ret = cache.Put(categoryPath, new int?(ordinal));
+                bool ret = cache.Put(categoryPath, ordinal);
                 // If the cache is full, we need to clear one or more old entries
                 // from the cache. However, if we delete from the cache a recent
                 // addition that isn't yet in our reader, for this entry to be


[14/46] lucenenet git commit: Facet: Normalized code formatting, license headers, and usings.

Posted by sy...@apache.org.
Facet: Normalized code formatting, license headers, and usings.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/ae225b9e
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/ae225b9e
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/ae225b9e

Branch: refs/heads/master
Commit: ae225b9eeec8ed5d3999b364b2e7a4f880347557
Parents: c40662a
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 01:14:36 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:03 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/DrillDownQuery.cs          |  18 ++-
 src/Lucene.Net.Facet/DrillSideways.cs           |  15 +--
 src/Lucene.Net.Facet/DrillSidewaysQuery.cs      |  16 ++-
 src/Lucene.Net.Facet/DrillSidewaysScorer.cs     |  25 ++---
 src/Lucene.Net.Facet/FacetField.cs              |   2 -
 src/Lucene.Net.Facet/FacetResult.cs             |  10 +-
 src/Lucene.Net.Facet/Facets.cs                  |   6 -
 src/Lucene.Net.Facet/FacetsCollector.cs         |  11 +-
 src/Lucene.Net.Facet/FacetsConfig.cs            |  28 ++---
 src/Lucene.Net.Facet/LabelAndValue.cs           |   9 +-
 src/Lucene.Net.Facet/MultiFacets.cs             |   5 -
 .../RandomSamplingFacetsCollector.cs            |  18 +--
 src/Lucene.Net.Facet/Range/DoubleRange.cs       |  18 +--
 .../Range/DoubleRangeFacetCounts.cs             |  23 ++--
 src/Lucene.Net.Facet/Range/LongRange.cs         |  15 +--
 src/Lucene.Net.Facet/Range/LongRangeCounter.cs  |  54 +++++----
 .../Range/LongRangeFacetCounts.cs               |  19 ++--
 src/Lucene.Net.Facet/Range/Range.cs             |  10 +-
 src/Lucene.Net.Facet/Range/RangeFacetCounts.cs  |   5 -
 .../DefaultSortedSetDocValuesReaderState.cs     |   8 +-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs  |  17 +--
 .../SortedSet/SortedSetDocValuesFacetField.cs   |   3 -
 .../SortedSet/SortedSetDocValuesReaderState.cs  |   5 -
 .../Taxonomy/AssociationFacetField.cs           |   8 +-
 .../Taxonomy/CachedOrdinalsReader.cs            |  13 +--
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs   |  12 +-
 .../Taxonomy/Directory/Consts.cs                |   6 +-
 .../Directory/DirectoryTaxonomyReader.cs        |  30 +++--
 .../Directory/DirectoryTaxonomyWriter.cs        |  79 ++++++-------
 .../Taxonomy/Directory/TaxonomyIndexArrays.cs   |  18 ++-
 .../Taxonomy/DocValuesOrdinalsReader.cs         |   6 +-
 src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs     |   6 +-
 .../Taxonomy/FastTaxonomyFacetCounts.cs         |  10 +-
 .../Taxonomy/FloatAssociationFacetField.cs      |  79 +++++++------
 .../Taxonomy/FloatTaxonomyFacets.cs             |   4 +-
 .../Taxonomy/IntAssociationFacetField.cs        |  10 +-
 .../Taxonomy/IntTaxonomyFacets.cs               |   3 -
 src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs |   3 -
 .../Taxonomy/ParallelTaxonomyArrays.cs          |  99 ++++++++--------
 .../Taxonomy/PrintTaxonomyStats.cs              |   6 +-
 .../Taxonomy/SearcherTaxonomyManager.cs         |  26 ++---
 .../Taxonomy/TaxonomyFacetCounts.cs             |   6 +-
 .../TaxonomyFacetSumFloatAssociations.cs        |  20 ++--
 .../Taxonomy/TaxonomyFacetSumIntAssociations.cs |  15 +--
 .../Taxonomy/TaxonomyFacetSumValueSource.cs     |  32 ++----
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs |   5 -
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  18 +--
 src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs |  11 +-
 .../Taxonomy/WriterCache/CategoryPathUtils.cs   |   1 -
 .../Taxonomy/WriterCache/CharBlockArray.cs      |   6 +-
 .../WriterCache/Cl2oTaxonomyWriterCache.cs      |   5 -
 .../Taxonomy/WriterCache/CollisionMap.cs        |   8 +-
 .../WriterCache/CompactLabelToOrdinal.cs        |   2 -
 .../Taxonomy/WriterCache/LabelToOrdinal.cs      | 112 +++++++++----------
 .../WriterCache/LruTaxonomyWriterCache.cs       |   5 -
 .../Taxonomy/WriterCache/NameHashIntCacheLRU.cs |  53 +++++----
 .../Taxonomy/WriterCache/NameIntCacheLRU.cs     |  23 ++--
 .../Taxonomy/WriterCache/TaxonomyWriterCache.cs |   8 +-
 src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs     |  98 ++++++++--------
 src/Lucene.Net.Facet/TopOrdAndIntQueue.cs       |   6 -
 60 files changed, 464 insertions(+), 728 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/DrillDownQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index 8234282..d6fa03b 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -1,12 +1,11 @@
-\ufeffusing System;
-using System.Diagnostics;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Collections.Generic;
+using System.Diagnostics;
 using System.Linq;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -24,17 +23,16 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-
-    using IndexReader = Lucene.Net.Index.IndexReader;
-    using Term = Lucene.Net.Index.Term;
-    using Occur = Lucene.Net.Search.BooleanClause.Occur;
     using BooleanClause = Lucene.Net.Search.BooleanClause;
     using BooleanQuery = Lucene.Net.Search.BooleanQuery;
     using ConstantScoreQuery = Lucene.Net.Search.ConstantScoreQuery;
     using Filter = Lucene.Net.Search.Filter;
     using FilteredQuery = Lucene.Net.Search.FilteredQuery;
+    using IndexReader = Lucene.Net.Index.IndexReader;
     using MatchAllDocsQuery = Lucene.Net.Search.MatchAllDocsQuery;
+    using Occur = Lucene.Net.Search.BooleanClause.Occur;
     using Query = Lucene.Net.Search.Query;
+    using Term = Lucene.Net.Index.Term;
     using TermQuery = Lucene.Net.Search.TermQuery;
 
     /// <summary>
@@ -52,7 +50,6 @@ namespace Lucene.Net.Facet
     /// </summary>
     public sealed class DrillDownQuery : Query
     {
-
         /// <summary>
         /// Creates a drill-down term. </summary>
         public static Term Term(string field, string dim, params string[] path)
@@ -68,7 +65,7 @@ namespace Lucene.Net.Facet
         /// Used by clone() </summary>
         internal DrillDownQuery(FacetsConfig config, BooleanQuery query, IDictionary<string, int?> drillDownDims)
         {
-            this.query = (BooleanQuery) query.Clone();
+            this.query = (BooleanQuery)query.Clone();
             this.drillDownDims.AddAll(drillDownDims);
             this.config = config;
         }
@@ -384,5 +381,4 @@ namespace Lucene.Net.Facet
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/DrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 5ba8f91..2c71be4 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -1,15 +1,13 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Facet.SortedSet;
+using Lucene.Net.Facet.Taxonomy;
+using Lucene.Net.Search;
+using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
-using Lucene.Net.Search;
-using Lucene.Net.Facet;
-using Lucene.Net.Facet.SortedSet;
-using Lucene.Net.Facet.Taxonomy;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -26,6 +24,7 @@ namespace Lucene.Net.Facet
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
+
     /// <summary>
     /// Computes drill down and sideways counts for the provided
     /// <seealso cref="DrillDownQuery"/>.  Drill sideways counts include
@@ -48,7 +47,6 @@ namespace Lucene.Net.Facet
     /// </summary>
     public class DrillSideways
     {
-
         /// <summary>
         /// <seealso cref="IndexSearcher"/> passed to constructor. </summary>
         protected internal readonly IndexSearcher searcher;
@@ -97,7 +95,6 @@ namespace Lucene.Net.Facet
             this.state = state;
         }
 
-        
         /// <summary>
         /// Subclass can override to customize per-dim Facets
         ///  impl. 
@@ -290,6 +287,4 @@ namespace Lucene.Net.Facet
             }
         }
     }
-
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
index c8e782e..9b25dac 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
@@ -1,9 +1,8 @@
-\ufeffusing System;
-using Lucene.Net.Support;
+\ufeffusing Lucene.Net.Support;
+using System;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,18 +21,18 @@ namespace Lucene.Net.Facet
      */
 
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using IndexReader = Lucene.Net.Index.IndexReader;
+    using Bits = Lucene.Net.Util.Bits;
+    using BulkScorer = Lucene.Net.Search.BulkScorer;
     using Collector = Lucene.Net.Search.Collector;
     using DocIdSet = Lucene.Net.Search.DocIdSet;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using Explanation = Lucene.Net.Search.Explanation;
     using Filter = Lucene.Net.Search.Filter;
+    using IndexReader = Lucene.Net.Index.IndexReader;
     using IndexSearcher = Lucene.Net.Search.IndexSearcher;
     using Query = Lucene.Net.Search.Query;
     using Scorer = Lucene.Net.Search.Scorer;
-    using BulkScorer = Lucene.Net.Search.BulkScorer;
     using Weight = Lucene.Net.Search.Weight;
-    using Bits = Lucene.Net.Util.Bits;
 
     /// <summary>
     /// Only purpose is to punch through and return a
@@ -48,7 +47,8 @@ namespace Lucene.Net.Facet
         internal readonly Query[] drillDownQueries;
         internal readonly bool scoreSubDocsAtOnce;
 
-        internal DrillSidewaysQuery(Query baseQuery, Collector drillDownCollector, Collector[] drillSidewaysCollectors, Query[] drillDownQueries, bool scoreSubDocsAtOnce)
+        internal DrillSidewaysQuery(Query baseQuery, Collector drillDownCollector,
+            Collector[] drillSidewaysCollectors, Query[] drillDownQueries, bool scoreSubDocsAtOnce)
         {
             this.baseQuery = baseQuery;
             this.drillDownCollector = drillDownCollector;
@@ -162,7 +162,6 @@ namespace Lucene.Net.Facet
 
             public override BulkScorer BulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, Bits acceptDocs)
             {
-
                 // TODO: it could be better if we take acceptDocs
                 // into account instead of baseScorer?
                 Scorer baseScorer = baseWeight.Scorer(context, acceptDocs);
@@ -312,5 +311,4 @@ namespace Lucene.Net.Facet
             return true;
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
index bbd00c1..bd045c3 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
@@ -1,11 +1,9 @@
 \ufeffusing System;
-using System.Diagnostics;
 using System.Collections.Generic;
-using Scorer = Lucene.Net.Search.Scorer;
+using System.Diagnostics;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,20 +21,18 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Bits = Lucene.Net.Util.Bits;
+    using BulkScorer = Lucene.Net.Search.BulkScorer;
     using Collector = Lucene.Net.Search.Collector;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
     using Scorer = Lucene.Net.Search.Scorer;
-    using BulkScorer = Lucene.Net.Search.BulkScorer;
     using Weight = Lucene.Net.Search.Weight;
-    using Bits = Lucene.Net.Util.Bits;
-    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
 
     internal class DrillSidewaysScorer : BulkScorer
     {
-
         //private static boolean DEBUG = false;
 
         private readonly Collector drillDownCollector;
@@ -56,7 +52,8 @@ namespace Lucene.Net.Facet
         private int collectDocID = -1;
         private float collectScore;
 
-        internal DrillSidewaysScorer(AtomicReaderContext context, Scorer baseScorer, Collector drillDownCollector, DocsAndCost[] dims, bool scoreSubDocsAtOnce)
+        internal DrillSidewaysScorer(AtomicReaderContext context, Scorer baseScorer, 
+            Collector drillDownCollector, DocsAndCost[] dims, bool scoreSubDocsAtOnce)
         {
             this.dims = dims;
             this.context = context;
@@ -177,7 +174,8 @@ namespace Lucene.Net.Facet
         ///  this case we just .next() on base and .advance() on
         ///  the dim filters. 
         /// </summary>
-        private void DoQueryFirstScoring(Collector collector, DocIdSetIterator[] disis, Collector[] sidewaysCollectors, Bits[] bits, Collector[] bitsSidewaysCollectors)
+        private void DoQueryFirstScoring(Collector collector, DocIdSetIterator[] disis, 
+            Collector[] sidewaysCollectors, Bits[] bits, Collector[] bitsSidewaysCollectors)
         {
             //if (DEBUG) {
             //  System.out.println("  doQueryFirstScoring");
@@ -258,9 +256,9 @@ namespace Lucene.Net.Facet
                 }
 
                 docID = baseScorer.NextDoc();
-            nextDocContinue: ;
+                nextDocContinue:;
             }
-        nextDocBreak: ;
+            nextDocBreak:;
         }
 
         /// <summary>
@@ -822,5 +820,4 @@ namespace Lucene.Net.Facet
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/FacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetField.cs b/src/Lucene.Net.Facet/FacetField.cs
index a11e5cf..5c85936 100644
--- a/src/Lucene.Net.Facet/FacetField.cs
+++ b/src/Lucene.Net.Facet/FacetField.cs
@@ -2,7 +2,6 @@
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -87,5 +86,4 @@ namespace Lucene.Net.Facet
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/FacetResult.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetResult.cs b/src/Lucene.Net.Facet/FacetResult.cs
index 924f8d5..5699e6c 100644
--- a/src/Lucene.Net.Facet/FacetResult.cs
+++ b/src/Lucene.Net.Facet/FacetResult.cs
@@ -1,12 +1,10 @@
-\ufeffusing System.Text;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
-using System.Globalization;
+\ufeffusing Lucene.Net.Support;
 using System;
+using System.Globalization;
+using System.Text;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -28,7 +26,6 @@ namespace Lucene.Net.Facet
     /// Counts or aggregates for a single dimension. </summary>
     public sealed class FacetResult
     {
-
         /// <summary>
         /// Dimension that was requested. </summary>
         public readonly string Dim;
@@ -136,5 +133,4 @@ namespace Lucene.Net.Facet
             return hashCode;
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Facets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Facets.cs b/src/Lucene.Net.Facet/Facets.cs
index f046127..405d1b3 100644
--- a/src/Lucene.Net.Facet/Facets.cs
+++ b/src/Lucene.Net.Facet/Facets.cs
@@ -1,10 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Support;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,7 +19,6 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-
     /// <summary>
     /// Common base class for all facets implementations.
     /// 
@@ -30,7 +26,6 @@ namespace Lucene.Net.Facet
     /// </summary>
     public abstract class Facets
     {
-
         /// <summary>
         /// Default constructor. </summary>
         public Facets()
@@ -60,5 +55,4 @@ namespace Lucene.Net.Facet
         /// </summary>
         public abstract IList<FacetResult> GetAllDims(int topN);
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/FacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index 4bbb76f..0739367 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -1,12 +1,11 @@
-\ufeffusing System;
-using System.Collections.Generic;
-using Lucene.Net.Index;
+\ufeffusing Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,6 +22,7 @@ namespace Lucene.Net.Facet
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
+
     /// <summary>
     /// Collects hits for subsequent faceting.  Once you've run
     ///  a search and collect hits into this, instantiate one of
@@ -33,7 +33,6 @@ namespace Lucene.Net.Facet
     /// </summary>
     public class FacetsCollector : Collector
     {
-
         private AtomicReaderContext context;
         private Scorer scorer;
         private int totalHits;
@@ -233,7 +232,6 @@ namespace Lucene.Net.Facet
             }
         }
 
-
         /// <summary>
         /// Utility method, to search and also collect all hits
         ///  into the provided <seealso cref="Collector"/>. 
@@ -368,5 +366,4 @@ namespace Lucene.Net.Facet
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 97ed4cf..a62d625 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -1,16 +1,13 @@
 \ufeffusing System;
-using System.Diagnostics;
-using System.Collections.Generic;
 using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Diagnostics;
 using System.Linq;
 using System.Text;
 using System.Threading;
-using Lucene.Net.Facet.SortedSet;
-using Lucene.Net.Facet.Taxonomy;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -28,23 +25,22 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using AssociationFacetField = Lucene.Net.Facet.Taxonomy.AssociationFacetField;
     using BinaryDocValuesField = Lucene.Net.Documents.BinaryDocValuesField;
+    using BytesRef = Lucene.Net.Util.BytesRef;
     using Document = Lucene.Net.Documents.Document;
-    using Field = Lucene.Net.Documents.Field;
-    using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField;
-    using StringField = Lucene.Net.Documents.StringField;
-    using SortedSetDocValuesFacetField = Lucene.Net.Facet.SortedSet.SortedSetDocValuesFacetField;
-    using AssociationFacetField = Lucene.Net.Facet.Taxonomy.AssociationFacetField;
     using FacetLabel = Lucene.Net.Facet.Taxonomy.FacetLabel;
+    using Field = Lucene.Net.Documents.Field;
     using FloatAssociationFacetField = Lucene.Net.Facet.Taxonomy.FloatAssociationFacetField;
-    using IntAssociationFacetField = Lucene.Net.Facet.Taxonomy.IntAssociationFacetField;
-    using TaxonomyWriter = Lucene.Net.Facet.Taxonomy.TaxonomyWriter;
     using IndexableField = Lucene.Net.Index.IndexableField;
     using IndexableFieldType = Lucene.Net.Index.IndexableFieldType;
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using BytesRef = Lucene.Net.Util.BytesRef;
+    using IntAssociationFacetField = Lucene.Net.Facet.Taxonomy.IntAssociationFacetField;
     using IntsRef = Lucene.Net.Util.IntsRef;
+    using SortedSetDocValuesFacetField = Lucene.Net.Facet.SortedSet.SortedSetDocValuesFacetField;
+    using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField;
+    using StringField = Lucene.Net.Documents.StringField;
+    using TaxonomyWriter = Lucene.Net.Facet.Taxonomy.TaxonomyWriter;
 
     /// <summary>
     /// Records per-dimension configuration.  By default a
@@ -63,7 +59,6 @@ namespace Lucene.Net.Facet
     /// </summary>
     public class FacetsConfig
     {
-
         /// <summary>
         /// Which Lucene field holds the drill-downs and ords (as
         ///  doc values). 
@@ -708,5 +703,4 @@ namespace Lucene.Net.Facet
             return parts.ToArray();
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/LabelAndValue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/LabelAndValue.cs b/src/Lucene.Net.Facet/LabelAndValue.cs
index 6cf1991..1503e3e 100644
--- a/src/Lucene.Net.Facet/LabelAndValue.cs
+++ b/src/Lucene.Net.Facet/LabelAndValue.cs
@@ -1,10 +1,8 @@
-\ufeffusing Lucene.Net.Support;
-using System;
+\ufeffusing System;
 using System.Globalization;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -65,8 +63,8 @@ namespace Lucene.Net.Facet
 
         public override string ToString()
         {
-            string valueString = (typeOfValue == typeof(int)) 
-                ? value.ToString("0", CultureInfo.InvariantCulture) 
+            string valueString = (typeOfValue == typeof(int))
+                ? value.ToString("0", CultureInfo.InvariantCulture)
                 : value.ToString("0.0#####", CultureInfo.InvariantCulture);
             return label + " (" + valueString + ")";
         }
@@ -86,5 +84,4 @@ namespace Lucene.Net.Facet
             return label.GetHashCode() + 1439 * value.GetHashCode();
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/MultiFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/MultiFacets.cs b/src/Lucene.Net.Facet/MultiFacets.cs
index b40772e..051de44 100644
--- a/src/Lucene.Net.Facet/MultiFacets.cs
+++ b/src/Lucene.Net.Facet/MultiFacets.cs
@@ -1,10 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,7 +19,6 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-
     /// <summary>
     /// Maps specified dims to provided Facets impls; else, uses
     ///  the default Facets impl. 
@@ -99,5 +95,4 @@ namespace Lucene.Net.Facet
             return results;
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
index 47bb029..a662b6a 100644
--- a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
@@ -1,12 +1,9 @@
 \ufeffusing System;
 using System.Collections.Generic;
 using System.IO;
-using Lucene.Net.Facet;
-using Lucene.Net.Search;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -24,13 +21,12 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-
     using DimConfig = FacetsConfig.DimConfig;
-    using IndexReader = Lucene.Net.Index.IndexReader;
-    using Term = Lucene.Net.Index.Term;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
-    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
     using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using Term = Lucene.Net.Index.Term;
 
     /// <summary>
     /// Collects hits for subsequent faceting, using sampling if needed. Once you've
@@ -47,18 +43,15 @@ namespace Lucene.Net.Facet
     /// </summary>
     public class RandomSamplingFacetsCollector : FacetsCollector
     {
-
         /// <summary>
         /// Faster alternative for java.util.Random, inspired by
-        /// http://dmurphy747.wordpress.com/2011/03/23/xorshift-vs-random-
-        /// performance-in-java/
+        /// http://dmurphy747.wordpress.com/2011/03/23/xorshift-vs-random-performance-in-java/
         /// <para>
         /// Has a period of 2^64-1
         /// </para>
         /// </summary>
         private class XORShift64Random
         {
-
             internal long x;
 
             /// <summary>
@@ -85,7 +78,6 @@ namespace Lucene.Net.Facet
                 int res = (int)(RandomLong() % n);
                 return (res < 0) ? -res : res;
             }
-
         }
 
         private const int NOT_CALCULATED = -1;
@@ -317,7 +309,5 @@ namespace Lucene.Net.Facet
                 return samplingRate;
             }
         }
-
     }
-
 }
\ No newline at end of file
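The XORShift64Random remarks above only name the algorithm, so here is a minimal sketch of the xorshift step they allude to. The shift triple (21, 35, 4) is one of Marsaglia's published 64-bit triples and is assumed for illustration only; the actual constants used by RandomLong are not shown in this hunk.

    // Assumes the non-zero 'internal long x' seed field shown in the hunk above.
    internal long RandomLong()
    {
        x ^= x << 21;
        x ^= (long)((ulong)x >> 35); // C# spelling of Java's unsigned shift, x >>> 35
        x ^= x << 4;
        return x;
    }

Given a non-zero seed, a valid triple cycles through all 2^64-1 non-zero states, which is the period the comment cites.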

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/DoubleRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRange.cs b/src/Lucene.Net.Facet/Range/DoubleRange.cs
index 8ad1439..28685d4 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRange.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRange.cs
@@ -1,10 +1,7 @@
-\ufeffusing System;
-using System.Collections.Generic;
-using Lucene.Net.Support;
+\ufeffusing System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,15 +19,14 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
-    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
+    using Bits = Lucene.Net.Util.Bits;
     using DocIdSet = Lucene.Net.Search.DocIdSet;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using Filter = Lucene.Net.Search.Filter;
-    using Bits = Lucene.Net.Util.Bits;
+    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
     using NumericUtils = Lucene.Net.Util.NumericUtils;
+    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
     /// Represents a range over double values.
@@ -138,7 +134,6 @@ namespace Lucene.Net.Facet.Range
                 this.valueSource = valueSource;
             }
 
-
             public override string ToString()
             {
                 return "Filter(" + outerInstance.ToString() + ")";
@@ -152,7 +147,7 @@ namespace Lucene.Net.Facet.Range
                 // ValueSourceRangeFilter (solr); also,
                 // https://issues.apache.org/jira/browse/LUCENE-4251
 
-                var values = valueSource.GetValues(new Dictionary<string,Lucene.Net.Search.Scorer>(), context);
+                var values = valueSource.GetValues(new Dictionary<string, Lucene.Net.Search.Scorer>(), context);
 
                 int maxDoc = context.Reader.MaxDoc;
 
@@ -197,7 +192,6 @@ namespace Lucene.Net.Facet.Range
                     this.fastMatchBits = fastMatchBits;
                 }
 
-
                 public override Bits GetBits()
                 {
                     return new BitsAnonymousInnerClassHelper(this);
@@ -238,6 +232,4 @@ namespace Lucene.Net.Facet.Range
             }
         }
     }
-
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index d0163d7..1e3a996 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -1,10 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,19 +19,15 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-
-    using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField; // javadocs
-    using FloatDocValuesField = Lucene.Net.Documents.FloatDocValuesField; // javadocs
-    using MatchingDocs = FacetsCollector.MatchingDocs;
-    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
-    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
-    using DoubleFieldSource = Lucene.Net.Queries.Function.ValueSources.DoubleFieldSource;
-    using FloatFieldSource = Lucene.Net.Queries.Function.ValueSources.FloatFieldSource; // javadocs
-    using DocIdSet = Lucene.Net.Search.DocIdSet;
-    using Filter = Lucene.Net.Search.Filter;
     using Bits = Lucene.Net.Util.Bits;
+    using DocIdSet = Lucene.Net.Search.DocIdSet;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DoubleFieldSource = Lucene.Net.Queries.Function.ValueSources.DoubleFieldSource;
+    using Filter = Lucene.Net.Search.Filter;
+    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
+    using MatchingDocs = FacetsCollector.MatchingDocs;
     using NumericUtils = Lucene.Net.Util.NumericUtils;
+    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
     /// <seealso cref="Facets"/> implementation that computes counts for
@@ -58,7 +51,6 @@ namespace Lucene.Net.Facet.Range
     /// </summary>
     public class DoubleRangeFacetCounts : RangeFacetCounts
     {
-
         /// <summary>
         /// Create {@code RangeFacetCounts}, using {@link
         ///  DoubleFieldSource} from the specified field. 
@@ -107,7 +99,7 @@ namespace Lucene.Net.Facet.Range
             int missingCount = 0;
             foreach (MatchingDocs hits in matchingDocs)
             {
-                FunctionValues fv = valueSource.GetValues(new Dictionary<string,object>(), hits.context);
+                FunctionValues fv = valueSource.GetValues(new Dictionary<string, object>(), hits.context);
 
                 TotCount += hits.totalHits;
                 Bits bits;
@@ -156,5 +148,4 @@ namespace Lucene.Net.Facet.Range
             TotCount -= missingCount;
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/LongRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRange.cs b/src/Lucene.Net.Facet/Range/LongRange.cs
index 52da204..7a4f0c1 100644
--- a/src/Lucene.Net.Facet/Range/LongRange.cs
+++ b/src/Lucene.Net.Facet/Range/LongRange.cs
@@ -1,9 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,14 +19,13 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
-    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
+    using Bits = Lucene.Net.Util.Bits;
     using DocIdSet = Lucene.Net.Search.DocIdSet;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using Filter = Lucene.Net.Search.Filter;
-    using Bits = Lucene.Net.Util.Bits;
+    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
+    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
     /// Represents a range over long values.
@@ -147,7 +144,7 @@ namespace Lucene.Net.Facet.Range
                 // ValueSourceRangeFilter (solr); also,
                 // https://issues.apache.org/jira/browse/LUCENE-4251
 
-                FunctionValues values = valueSource.GetValues(new Dictionary<string,object>(), context);
+                FunctionValues values = valueSource.GetValues(new Dictionary<string, object>(), context);
 
                 int maxDoc = context.Reader.MaxDoc;
 
@@ -220,7 +217,7 @@ namespace Lucene.Net.Facet.Range
                         return outerInstance.outerInstance.outerInstance.accept(outerInstance.values.LongVal(docID));
                     }
 
-                    
+
                     public virtual int Length()
                     {
                         return outerInstance.maxDoc;
@@ -231,9 +228,7 @@ namespace Lucene.Net.Facet.Range
                 {
                     throw new System.NotSupportedException("this filter can only be accessed via bits()");
                 }
-
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index 98123c2..43ef3b4 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -1,10 +1,9 @@
-\ufeffusing System.Diagnostics;
-using System.Collections.Generic;
+\ufeffusing System.Collections.Generic;
+using System.Diagnostics;
 using System.Text;
 
 namespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,7 +21,6 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-
     /// <summary>
     /// Counts how many times each range was seen;
     ///  per-hit it's just a binary search (<seealso cref="#add"/>)
@@ -32,7 +30,6 @@ namespace Lucene.Net.Facet.Range
 
     internal sealed class LongRangeCounter
     {
-
         internal readonly LongRangeNode root;
         internal readonly long[] boundaries;
         internal readonly int[] leafCounts;
@@ -56,7 +53,7 @@ namespace Lucene.Net.Facet.Range
             foreach (LongRange range in ranges)
             {
                 int? cur;
-                if (!endsMap.TryGetValue(range.minIncl,out cur))
+                if (!endsMap.TryGetValue(range.minIncl, out cur))
                 {
                     endsMap[range.minIncl] = 1;
                 }
@@ -64,8 +61,8 @@ namespace Lucene.Net.Facet.Range
                 {
                     endsMap[range.minIncl] = (int)cur | 1;
                 }
-                
-                if (!endsMap.TryGetValue(range.maxIncl,out cur))
+
+                if (!endsMap.TryGetValue(range.maxIncl, out cur))
                 {
                     endsMap[range.maxIncl] = 2;
                 }
@@ -95,7 +92,7 @@ namespace Lucene.Net.Facet.Range
 
             while (upto0 < endsList.Count)
             {
-                v = endsList[upto0].HasValue ?  endsList[upto0].Value : 0;
+                v = endsList[upto0].HasValue ? endsList[upto0].Value : 0;
                 int flags = endsMap[v].HasValue ? endsMap[v].Value : 0;
                 //System.out.println("  v=" + v + " flags=" + flags);
                 if (flags == 3)
@@ -333,25 +330,25 @@ namespace Lucene.Net.Facet.Range
             /// <summary>
             /// Recursively assigns range outputs to each node. </summary>
             internal void addOutputs(int index, LongRange range)
-		{
-		  if (start >= range.minIncl && end <= range.maxIncl)
-		  {
-			// Our range is fully included in the incoming
-			// range; add to our output list:
-			if (outputs == null)
-			{
-			  outputs = new List<int?>();
-			}
-			outputs.Add(index);
-		  }
-		  else if (left != null)
-		  {
-			Debug.Assert(right != null);
-			// Recurse:
-			left.addOutputs(index, range);
-			right.addOutputs(index, range);
-		  }
-		}
+            {
+                if (start >= range.minIncl && end <= range.maxIncl)
+                {
+                    // Our range is fully included in the incoming
+                    // range; add to our output list:
+                    if (outputs == null)
+                    {
+                        outputs = new List<int?>();
+                    }
+                    outputs.Add(index);
+                }
+                else if (left != null)
+                {
+                    Debug.Assert(right != null);
+                    // Recurse:
+                    left.addOutputs(index, range);
+                    right.addOutputs(index, range);
+                }
+            }
 
             internal void ToString(StringBuilder sb, int depth)
             {
@@ -381,5 +378,4 @@ namespace Lucene.Net.Facet.Range
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 8435bbe..813757d 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -1,9 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,15 +19,14 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-
-    using MatchingDocs = FacetsCollector.MatchingDocs;
-    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
-    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
-    using LongFieldSource = Lucene.Net.Queries.Function.ValueSources.LongFieldSource;
-    using DocIdSet = Lucene.Net.Search.DocIdSet;
-    using Filter = Lucene.Net.Search.Filter;
     using Bits = Lucene.Net.Util.Bits;
+    using DocIdSet = Lucene.Net.Search.DocIdSet;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Filter = Lucene.Net.Search.Filter;
+    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
+    using LongFieldSource = Lucene.Net.Queries.Function.ValueSources.LongFieldSource;
+    using MatchingDocs = FacetsCollector.MatchingDocs;
+    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
     /// <seealso cref="Facets"/> implementation that computes counts for
@@ -71,7 +68,8 @@ namespace Lucene.Net.Facet.Range
         ///  checked for the matching ranges.  The filter must be
         ///  random access (implement <seealso cref="DocIdSet#bits"/>). 
         /// </summary>
-        public LongRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, Filter fastMatchFilter, params LongRange[] ranges)
+        public LongRangeFacetCounts(string field, ValueSource valueSource, 
+            FacetsCollector hits, Filter fastMatchFilter, params LongRange[] ranges)
             : base(field, ranges, fastMatchFilter)
         {
             Count(valueSource, hits.GetMatchingDocs);
@@ -139,5 +137,4 @@ namespace Lucene.Net.Facet.Range
             TotCount -= missingCount;
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/Range.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/Range.cs b/src/Lucene.Net.Facet/Range/Range.cs
index 548b915..c207031 100644
--- a/src/Lucene.Net.Facet/Range/Range.cs
+++ b/src/Lucene.Net.Facet/Range/Range.cs
@@ -1,8 +1,5 @@
-\ufeffusing Lucene.Net.Facet;
-
-namespace Lucene.Net.Facet.Range
+\ufeffnamespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,10 +17,8 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
     using Filter = Lucene.Net.Search.Filter;
-    using FilteredQuery = Lucene.Net.Search.FilteredQuery; // javadocs
-    using Lucene.Net.Search; // javadocs
+    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
     /// Base class for a single labeled range.
@@ -86,5 +81,4 @@ namespace Lucene.Net.Facet.Range
             throw new System.ArgumentException("range \"" + Label + "\" matches nothing");
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
index 29e02fb..53a4d26 100644
--- a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
@@ -1,10 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Range
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,7 +19,6 @@ namespace Lucene.Net.Facet.Range
      * limitations under the License.
      */
 
-
     using Filter = Lucene.Net.Search.Filter;
 
     /// <summary>
@@ -95,5 +91,4 @@ namespace Lucene.Net.Facet.Range
             return new[] { GetTopChildren(topN, null) };
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index 8113d80..649135c 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -1,8 +1,7 @@
-\ufeffusing System.Collections.Generic;
-using Lucene.Net.Index;
+\ufeffusing Lucene.Net.Index;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
-using Lucene.Net.Facet;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.SortedSet
 {
@@ -28,7 +27,6 @@ namespace Lucene.Net.Facet.SortedSet
     /// </summary>
     public class DefaultSortedSetDocValuesReaderState : SortedSetDocValuesReaderState
     {
-
         private readonly string field;
         private readonly AtomicReader topReader;
         private readonly int valueCount;
@@ -152,7 +150,5 @@ namespace Lucene.Net.Facet.SortedSet
                 return valueCount;
             }
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 509b473..1039fff 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -4,7 +4,6 @@ using System.Linq;
 
 namespace Lucene.Net.Facet.SortedSet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,18 +21,13 @@ namespace Lucene.Net.Facet.SortedSet
      * limitations under the License.
      */
 
-
-    using MatchingDocs = FacetsCollector.MatchingDocs;
-    using OrdRange = Lucene.Net.Facet.SortedSet.SortedSetDocValuesReaderState.OrdRange;
-    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using IndexReader = Lucene.Net.Index.IndexReader;
     using MultiDocValues = Lucene.Net.Index.MultiDocValues;
-    using MultiSortedSetDocValues = Lucene.Net.Index.MultiDocValues.MultiSortedSetDocValues;
+    using OrdRange = Lucene.Net.Facet.SortedSet.SortedSetDocValuesReaderState.OrdRange;
     using ReaderUtil = Lucene.Net.Index.ReaderUtil;
     using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
-    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using LongValues = Lucene.Net.Util.LongValues;
 
     /// <summary>
     /// Compute facets counts from previously
@@ -58,7 +52,6 @@ namespace Lucene.Net.Facet.SortedSet
     /// </summary>
     public class SortedSetDocValuesFacetCounts : Facets
     {
-
         internal readonly SortedSetDocValuesReaderState state;
         internal readonly SortedSetDocValues dv;
         internal readonly string field;
@@ -98,7 +91,6 @@ namespace Lucene.Net.Facet.SortedSet
 
         private FacetResult GetDim(string dim, OrdRange ordRange, int topN)
         {
-
             TopOrdAndIntQueue q = null;
 
             int bottomCount = 0;
@@ -180,7 +172,6 @@ namespace Lucene.Net.Facet.SortedSet
 
             foreach (FacetsCollector.MatchingDocs hits in matchingDocs)
             {
-
                 var reader = hits.context.AtomicReader;
                 //System.out.println("  reader=" + reader);
                 // LUCENE-5090: make sure the provided reader context "matches"
@@ -301,7 +292,6 @@ namespace Lucene.Net.Facet.SortedSet
 
         public override IList<FacetResult> GetAllDims(int topN)
         {
-
             IList<FacetResult> results = new List<FacetResult>();
             foreach (KeyValuePair<string, OrdRange> ent in state.PrefixToOrdRange)
             {
@@ -344,5 +334,4 @@ namespace Lucene.Net.Facet.SortedSet
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
index c860d56..3729583 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
@@ -1,6 +1,5 @@
 \ufeffnamespace Lucene.Net.Facet.SortedSet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -27,7 +26,6 @@
     /// </summary>
     public class SortedSetDocValuesFacetField : Field
     {
-
         /// <summary>
         /// Indexed <seealso cref="FieldType"/>. </summary>
         public static readonly FieldType TYPE = new FieldType();
@@ -61,5 +59,4 @@
             return "SortedSetDocValuesFacetField(dim=" + Dim + " label=" + Label + ")";
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
index ef7e0fe..2b3c2b1 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
@@ -2,7 +2,6 @@
 
 namespace Lucene.Net.Facet.SortedSet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,7 +19,6 @@ namespace Lucene.Net.Facet.SortedSet
      * limitations under the License.
      */
 
-
     using IndexReader = Lucene.Net.Index.IndexReader;
     using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
 
@@ -42,10 +40,8 @@ namespace Lucene.Net.Facet.SortedSet
     ///  for a given <seealso cref="IndexReader"/>. 
     /// </para>
     /// </summary>
-
     public abstract class SortedSetDocValuesReaderState
     {
-
         /// <summary>
         /// Holds start/end range of ords, which maps to one
         ///  dimension (someday we may generalize it to map to
@@ -99,5 +95,4 @@ namespace Lucene.Net.Facet.SortedSet
         /// Number of unique labels. </summary>
         public abstract int Size { get; }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
index 6113eef..6092f9a 100644
--- a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
@@ -1,9 +1,7 @@
-\ufeffusing Lucene.Net.Facet;
-using Lucene.Net.Support;
+\ufeffusing Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,10 +19,10 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
+    using BytesRef = Lucene.Net.Util.BytesRef;
     using Document = Lucene.Net.Documents.Document; // javadocs
     using Field = Lucene.Net.Documents.Field;
     using FieldType = Lucene.Net.Documents.FieldType;
-    using BytesRef = Lucene.Net.Util.BytesRef;
 
     /// <summary>
     /// Add an instance of this to your <seealso cref="Document"/> to add
@@ -39,7 +37,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class AssociationFacetField : Field
     {
-
         /// <summary>
         /// Indexed <seealso cref="FieldType"/>. </summary>
         public static readonly FieldType TYPE = new FieldType();
@@ -87,5 +84,4 @@ namespace Lucene.Net.Facet.Taxonomy
             return "AssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " bytes=" + assoc + ")";
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index 0471f65..70bbd26 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -1,11 +1,10 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Collections.Generic;
 using System.Threading;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,12 +22,11 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
-    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
-    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
     using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
     using Accountable = Lucene.Net.Util.Accountable;
     using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
     using IntsRef = Lucene.Net.Util.IntsRef;
     using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
 
@@ -62,7 +60,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class CachedOrdinalsReader : OrdinalsReader, Accountable
     {
-
         private readonly OrdinalsReader source;
 
         private readonly IDictionary<object, CachedOrds> ordsCache = new WeakDictionary<object, CachedOrds>();
@@ -128,7 +125,6 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Holds the cached ordinals in two parallel {@code int[]} arrays. </summary>
         public sealed class CachedOrds : Accountable
         {
-
             /// <summary>
             /// Index into <seealso cref="#ordinals"/> for each document. </summary>
             public readonly int[] offsets;
@@ -204,5 +200,4 @@ namespace Lucene.Net.Facet.Taxonomy
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index df69862..3709b58 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -1,11 +1,10 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Diagnostics;
 using System.Text;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,7 +22,6 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
     /// <summary>
     /// Holds a sequence of string components, specifying the hierarchical name of a
     /// category.
@@ -32,7 +30,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class CategoryPath : IComparable<CategoryPath>
     {
-
         /// <summary>
         /// An empty <seealso cref="CategoryPath"/> </summary>
         public static readonly CategoryPath EMPTY = new CategoryPath();
@@ -154,7 +151,8 @@ namespace Lucene.Net.Facet.Taxonomy
 
         private void hasDelimiter(string offender, char delimiter)
         {
-            throw new System.ArgumentException("delimiter character '" + delimiter + "' (U+" + delimiter.ToString() + ") appears in path component \"" + offender + "\"");
+            throw new System.ArgumentException("delimiter character '" + delimiter + 
+                "' (U+" + delimiter.ToString() + ") appears in path component \"" + offender + "\"");
         }
 
         private void noDelimiter(char[] buf, int offset, int len, char delimiter)
@@ -310,7 +308,5 @@ namespace Lucene.Net.Facet.Taxonomy
             sb.Length = sb.Length - 1; // remove last delimiter
             return sb.ToString();
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
index 5b69985..3660ff0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
@@ -1,8 +1,5 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy.Directory
 {
-
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,6 +17,8 @@
      * limitations under the License.
      */
 
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
     /// <summary>
     /// @lucene.experimental
     /// </summary>
@@ -30,5 +29,4 @@
         internal const string PAYLOAD_PARENT = "p";
         internal static readonly BytesRef PAYLOAD_PARENT_BYTES_REF = new BytesRef(PAYLOAD_PARENT);
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index da82cbf..e540a00 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -2,22 +2,9 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
-using Lucene.Net.Store;
 
 namespace Lucene.Net.Facet.Taxonomy.Directory
 {
-    using Document = Lucene.Net.Documents.Document;
-    using Lucene.Net.Facet.Taxonomy;
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException; // javadocs
-    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
-    using IndexWriter = Lucene.Net.Index.IndexWriter;
-    using MultiFields = Lucene.Net.Index.MultiFields;
-    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
-    using Directory = Lucene.Net.Store.Directory;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -35,6 +22,17 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
      * limitations under the License.
      */
 
+    using Lucene.Net.Facet.Taxonomy;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Document = Lucene.Net.Documents.Document;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+
     /// <summary>
     /// A <seealso cref="TaxonomyReader"/> which retrieves stored taxonomy information from a
     /// <seealso cref="Directory"/>.
@@ -49,7 +47,6 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     /// </summary>
     public class DirectoryTaxonomyReader : TaxonomyReader, IDisposable
     {
-
         public class IntClass
         {
             public int? IntItem { get; set; }
@@ -71,7 +68,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         /// recreated, you should pass {@code null} as the caches and parent/children
         /// arrays.
         /// </summary>
-        internal DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter, LRUHashMap<FacetLabel, IntClass> ordinalCache, LRUHashMap<int, FacetLabel> categoryCache, TaxonomyIndexArrays taxoArrays)
+        internal DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter, 
+            LRUHashMap<FacetLabel, IntClass> ordinalCache, LRUHashMap<int, FacetLabel> categoryCache, 
+            TaxonomyIndexArrays taxoArrays)
         {
             this.indexReader = indexReader;
             this.taxoWriter = taxoWriter;
@@ -439,5 +438,4 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             Dispose(true);
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index 8ead122..c93d0e7 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -1,50 +1,14 @@
-\ufeffusing System;
-using System.Collections;
-using System.Collections.Concurrent;
-using System.Diagnostics;
-using System.Collections.Generic;
-using System.IO;
-using Lucene.Net.Analysis.Tokenattributes;
+\ufeffusing Lucene.Net.Analysis.Tokenattributes;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
 
 namespace Lucene.Net.Facet.Taxonomy.Directory
 {
-
-    using TokenStream = Lucene.Net.Analysis.TokenStream;
-    using CharTermAttribute = Lucene.Net.Analysis.Tokenattributes.CharTermAttribute;
-    using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
-    using Document = Lucene.Net.Documents.Document;
-    using Field = Lucene.Net.Documents.Field;
-    using FieldType = Lucene.Net.Documents.FieldType;
-    using StringField = Lucene.Net.Documents.StringField;
-    using TextField = Lucene.Net.Documents.TextField;
-    using TaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.TaxonomyWriterCache;
-    using Cl2oTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.Cl2oTaxonomyWriterCache;
-    using LruTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.LruTaxonomyWriterCache;
-    using AtomicReader = Lucene.Net.Index.AtomicReader;
-    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException; // javadocs
-    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
-    using IndexReader = Lucene.Net.Index.IndexReader;
-    using IndexWriter = Lucene.Net.Index.IndexWriter;
-    using OpenMode = Lucene.Net.Index.IndexWriterConfig.OpenMode_e;
-    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
-    using LogByteSizeMergePolicy = Lucene.Net.Index.LogByteSizeMergePolicy;
-    using ReaderManager = Lucene.Net.Index.ReaderManager;
-    using SegmentInfos = Lucene.Net.Index.SegmentInfos;
-    using Terms = Lucene.Net.Index.Terms;
-    using TermsEnum = Lucene.Net.Index.TermsEnum;
-    using TieredMergePolicy = Lucene.Net.Index.TieredMergePolicy;
-    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
-    using Directory = Lucene.Net.Store.Directory;
-    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException; // javadocs
-    using NativeFSLockFactory = Lucene.Net.Store.NativeFSLockFactory;
-    using SimpleFSLockFactory = Lucene.Net.Store.SimpleFSLockFactory;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -62,6 +26,33 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
      * limitations under the License.
      */
 
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException; // javadocs
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Cl2oTaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.Cl2oTaxonomyWriterCache;
+    using Directory = Lucene.Net.Store.Directory;
+    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException; // javadocs
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Document = Lucene.Net.Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using FieldType = Lucene.Net.Documents.FieldType;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LogByteSizeMergePolicy = Lucene.Net.Index.LogByteSizeMergePolicy;
+    using OpenMode = Lucene.Net.Index.IndexWriterConfig.OpenMode_e;
+    using ReaderManager = Lucene.Net.Index.ReaderManager;
+    using SegmentInfos = Lucene.Net.Index.SegmentInfos;
+    using StringField = Lucene.Net.Documents.StringField;
+    using TaxonomyWriterCache = Lucene.Net.Facet.Taxonomy.WriterCache.TaxonomyWriterCache;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TextField = Lucene.Net.Documents.TextField;
+    using TieredMergePolicy = Lucene.Net.Index.TieredMergePolicy;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
     /// <summary>
     /// <seealso cref="TaxonomyWriter"/> which uses a <seealso cref="Directory"/> to store the taxonomy
     /// information on disk, and keeps an additional in-memory cache of some or all
@@ -83,7 +74,6 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     /// </summary>
     public class DirectoryTaxonomyWriter : TaxonomyWriter
     {
-
         /// <summary>
         /// Property name of user commit data that contains the index epoch. The epoch
         /// changes whenever the taxonomy is recreated (i.e. opened with
@@ -184,9 +174,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
         ///     removed using <seealso cref="#unlock(Directory)"/>. </exception>
         /// <exception cref="IOException">
         ///     if another error occurred. </exception>
-        public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, TaxonomyWriterCache cache)
+        public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, 
+            TaxonomyWriterCache cache)
         {
-
             dir = directory;
             IndexWriterConfig config = CreateIndexWriterConfig(openMode);
             indexWriter = OpenIndexWriter(dir, config);
@@ -1198,5 +1188,4 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
index 9a99f4a..2319550 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
@@ -3,14 +3,6 @@ using System.Diagnostics;
 
 namespace Lucene.Net.Facet.Taxonomy.Directory
 {
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
-    using IndexReader = Lucene.Net.Index.IndexReader;
-    using MultiFields = Lucene.Net.Index.MultiFields;
-    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -28,6 +20,13 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
      * limitations under the License.
      */
 
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+
     /// <summary>
     /// A <seealso cref="ParallelTaxonomyArrays"/> that are initialized from the taxonomy
     /// index.
@@ -36,7 +35,6 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
     /// </summary>
     internal class TaxonomyIndexArrays : ParallelTaxonomyArrays
     {
-
         private readonly int[] parents_Renamed;
 
         // the following two arrays are lazily intialized. note that we only keep a
@@ -246,7 +244,5 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             // the array is guaranteed to be populated
             return siblings_Renamed;
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
index 3d50275..3d5cc53 100644
--- a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
@@ -1,6 +1,5 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -18,11 +17,11 @@
      * limitations under the License.
      */
 
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
     using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using DocValues = Lucene.Net.Index.DocValues;
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
     using BytesRef = Lucene.Net.Util.BytesRef;
+    using DocValues = Lucene.Net.Index.DocValues;
     using IntsRef = Lucene.Net.Util.IntsRef;
 
     /// <summary>
@@ -126,5 +125,4 @@
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index 9a6884c..94eb2d6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -1,10 +1,9 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Diagnostics;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -220,5 +219,4 @@ namespace Lucene.Net.Facet.Taxonomy
             return "FacetLabel: [" + Arrays.ToString(parts) + "]";
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
index f0d5fa0..f54b70b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
@@ -1,10 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
-using Lucene.Net.Search;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -22,11 +19,10 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
     using MatchingDocs = FacetsCollector.MatchingDocs;
     using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using BytesRef = Lucene.Net.Util.BytesRef;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
 
     /// <summary>
     /// Computes facets counts, assuming the default encoding
@@ -36,7 +32,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class FastTaxonomyFacetCounts : IntTaxonomyFacets
     {
-
         /// <summary>
         /// Create {@code FastTaxonomyFacetCounts}, which also
         ///  counts all facet labels. 
@@ -75,7 +70,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 BytesRef bytesRef = new BytesRef();
                 while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                 {
-                    dv.Get(doc,bytesRef);
+                    dv.Get(doc, bytesRef);
                     var bytes = bytesRef.Bytes;
                     int end = bytesRef.Offset + bytesRef.Length;
                     int ord = 0;
@@ -101,5 +96,4 @@ namespace Lucene.Net.Facet.Taxonomy
             Rollup();
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index 54e8ef2..a1e6c88 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -3,8 +3,7 @@ using System.Globalization;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
-	/*
+    /*
 	 * Licensed to the Apache Software Foundation (ASF) under one or more
 	 * contributor license agreements.  See the NOTICE file distributed with
 	 * this work for additional information regarding copyright ownership.
@@ -21,46 +20,46 @@ namespace Lucene.Net.Facet.Taxonomy
 	 * limitations under the License.
 	 */
 
-	using Document = Lucene.Net.Documents.Document;
-	using BytesRef = Lucene.Net.Util.BytesRef;
-
-	/// <summary>
-	/// Add an instance of this to your <seealso cref="Document"/> to add
-	///  a facet label associated with a float.  Use {@link
-	///  TaxonomyFacetSumFloatAssociations} to aggregate float values
-	///  per facet label at search time.
-	/// 
-	///  @lucene.experimental 
-	/// </summary>
-	public class FloatAssociationFacetField : AssociationFacetField
-	{
-
-	  /// <summary>
-	  /// Creates this from {@code dim} and {@code path} and a
-	  ///  float association 
-	  /// </summary>
-	  public FloatAssociationFacetField(float assoc, string dim, params string[] path) : base(floatToBytesRef(assoc), dim, path)
-	  {
-	  }
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Document = Lucene.Net.Documents.Document;
 
-	  /// <summary>
-	  /// Encodes a {@code float} as a 4-byte <seealso cref="BytesRef"/>. </summary>
-	  public static BytesRef floatToBytesRef(float v)
-	  {
-		return IntAssociationFacetField.intToBytesRef(Number.FloatToIntBits(v));
-	  }
+    /// <summary>
+    /// Add an instance of this to your <seealso cref="Document"/> to add
+    ///  a facet label associated with a float.  Use {@link
+    ///  TaxonomyFacetSumFloatAssociations} to aggregate float values
+    ///  per facet label at search time.
+    /// 
+    ///  @lucene.experimental 
+    /// </summary>
+    public class FloatAssociationFacetField : AssociationFacetField
+    {
+        /// <summary>
+        /// Creates this from {@code dim} and {@code path} and a
+        ///  float association 
+        /// </summary>
+        public FloatAssociationFacetField(float assoc, string dim, params string[] path) 
+            : base(floatToBytesRef(assoc), dim, path)
+        {
+        }
 
-	  /// <summary>
-	  /// Decodes a previously encoded {@code float}. </summary>
-	  public static float bytesRefToFloat(BytesRef b)
-	  {
-		return Number.IntBitsToFloat(IntAssociationFacetField.bytesRefToInt(b));
-	  }
+        /// <summary>
+        /// Encodes a {@code float} as a 4-byte <seealso cref="BytesRef"/>. </summary>
+        public static BytesRef floatToBytesRef(float v)
+        {
+            return IntAssociationFacetField.intToBytesRef(Number.FloatToIntBits(v));
+        }
 
-	  public override string ToString()
-	  {
-		return "FloatAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + bytesRefToFloat(assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
-	  }
-	}
+        /// <summary>
+        /// Decodes a previously encoded {@code float}. </summary>
+        public static float bytesRefToFloat(BytesRef b)
+        {
+            return Number.IntBitsToFloat(IntAssociationFacetField.bytesRefToInt(b));
+        }
 
+        public override string ToString()
+        {
+            return "FloatAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + 
+                " value=" + bytesRefToFloat(assoc).ToString("0.0#####", CultureInfo.InvariantCulture) + ")";
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index 3921540..e0b3688 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -1,10 +1,9 @@
 \ufeffusing System;
-using System.Diagnostics;
 using System.Collections.Generic;
+using System.Diagnostics;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -30,7 +29,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public abstract class FloatTaxonomyFacets : TaxonomyFacets
     {
-
         /// <summary>
         /// Per-ordinal value. </summary>
         protected readonly float[] values;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
index 61d8177..417fbd6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
@@ -2,7 +2,6 @@
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,8 +19,8 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-    using Document = Lucene.Net.Documents.Document;
     using BytesRef = Lucene.Net.Util.BytesRef;
+    using Document = Lucene.Net.Documents.Document;
 
     /// <summary>
     /// Add an instance of this to your <seealso cref="Document"/> to add
@@ -33,7 +32,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class IntAssociationFacetField : AssociationFacetField
     {
-
         /// <summary>
         /// Creates this from {@code dim} and {@code path} and an
         ///  int association 
@@ -49,7 +47,7 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </summary>
         public static BytesRef intToBytesRef(int v)
         {
-            
+
             byte[] bytes = new byte[4];
             // big-endian:
             bytes[0] = (byte)(v >> 24);
@@ -63,7 +61,8 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Decodes a previously encoded {@code int}. </summary>
         public static int bytesRefToInt(BytesRef b)
         {
-            return ((b.Bytes[b.Offset] & 0xFF) << 24) | ((b.Bytes[b.Offset + 1] & 0xFF) << 16) | ((b.Bytes[b.Offset + 2] & 0xFF) << 8) | (b.Bytes[b.Offset + 3] & 0xFF);
+            return ((b.Bytes[b.Offset] & 0xFF) << 24) | ((b.Bytes[b.Offset + 1] & 0xFF) << 16) | 
+                ((b.Bytes[b.Offset + 2] & 0xFF) << 8) | (b.Bytes[b.Offset + 3] & 0xFF);
         }
 
         public override string ToString()
@@ -71,5 +70,4 @@ namespace Lucene.Net.Facet.Taxonomy
             return "IntAssociationFacetField(dim=" + dim + " path=" + Arrays.ToString(path) + " value=" + bytesRefToInt(assoc) + ")";
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index f73f167..11f76f0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -3,7 +3,6 @@ using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,7 +20,6 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
     using DimConfig = Lucene.Net.Facet.FacetsConfig.DimConfig;
 
     /// <summary>
@@ -31,7 +29,6 @@ namespace Lucene.Net.Facet.Taxonomy
 
     public abstract class IntTaxonomyFacets : TaxonomyFacets
     {
-
         /// <summary>
         /// Per-ordinal value. </summary>
         protected internal readonly int[] values;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
index 544a1ef..c8e4843 100644
--- a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
@@ -1,6 +1,5 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -27,7 +26,6 @@
 
     public abstract class OrdinalsReader
     {
-
         /// <summary>
         /// Returns ordinals for documents in one segment. </summary>
         public abstract class OrdinalsSegmentReader
@@ -64,5 +62,4 @@
         /// </summary>
         public abstract string IndexFieldName { get; }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
index 26f1d8a..60e351e 100644
--- a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
@@ -1,8 +1,6 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy
 {
-
-
-	/*
+    /*
 	 * Licensed to the Apache Software Foundation (ASF) under one or more
 	 * contributor license agreements.  See the NOTICE file distributed with
 	 * this work for additional information regarding copyright ownership.
@@ -19,56 +17,53 @@
 	 * limitations under the License.
 	 */
 
-	/// <summary>
-	/// Returns 3 arrays for traversing the taxonomy:
-	/// <ul>
-	/// <li>{@code parents}: {@code parents[i]} denotes the parent of category
-	/// ordinal {@code i}.</li>
-	/// <li>{@code children}: {@code children[i]} denotes a child of category ordinal
-	/// {@code i}.</li>
-	/// <li>{@code siblings}: {@code siblings[i]} denotes the sibling of category
-	/// ordinal {@code i}.</li>
-	/// </ul>
-	/// 
-	/// To traverse the taxonomy tree, you typically start with {@code children[0]}
-	/// (ordinal 0 is reserved for ROOT), and then depends if you want to do DFS or
-	/// BFS, you call {@code children[children[0]]} or {@code siblings[children[0]]}
-	/// and so forth, respectively.
-	/// 
-	/// <para>
-	/// <b>NOTE:</b> you are not expected to modify the values of the arrays, since
-	/// the arrays are shared with other threads.
-	/// 
-	/// @lucene.experimental
-	/// </para>
-	/// </summary>
-	public abstract class ParallelTaxonomyArrays
-	{
-
-	  /// <summary>
-	  /// Sole constructor. </summary>
-	  public ParallelTaxonomyArrays()
-	  {
-	  }
-
-	  /// <summary>
-	  /// Returns the parents array, where {@code parents[i]} denotes the parent of
-	  /// category ordinal {@code i}.
-	  /// </summary>
-	  public abstract int[] Parents();
-
-	  /// <summary>
-	  /// Returns the children array, where {@code children[i]} denotes a child of
-	  /// category ordinal {@code i}.
-	  /// </summary>
-	  public abstract int[] Children();
+    /// <summary>
+    /// Returns 3 arrays for traversing the taxonomy:
+    /// <ul>
+    /// <li>{@code parents}: {@code parents[i]} denotes the parent of category
+    /// ordinal {@code i}.</li>
+    /// <li>{@code children}: {@code children[i]} denotes a child of category ordinal
+    /// {@code i}.</li>
+    /// <li>{@code siblings}: {@code siblings[i]} denotes the sibling of category
+    /// ordinal {@code i}.</li>
+    /// </ul>
+    /// 
+    /// To traverse the taxonomy tree, you typically start with {@code children[0]}
+    /// (ordinal 0 is reserved for ROOT), and then depends if you want to do DFS or
+    /// BFS, you call {@code children[children[0]]} or {@code siblings[children[0]]}
+    /// and so forth, respectively.
+    /// 
+    /// <para>
+    /// <b>NOTE:</b> you are not expected to modify the values of the arrays, since
+    /// the arrays are shared with other threads.
+    /// 
+    /// @lucene.experimental
+    /// </para>
+    /// </summary>
+    public abstract class ParallelTaxonomyArrays
+    {
+        /// <summary>
+        /// Sole constructor. </summary>
+        public ParallelTaxonomyArrays()
+        {
+        }
 
-	  /// <summary>
-	  /// Returns the siblings array, where {@code siblings[i]} denotes the sibling
-	  /// of category ordinal {@code i}.
-	  /// </summary>
-	  public abstract int[] Siblings();
+        /// <summary>
+        /// Returns the parents array, where {@code parents[i]} denotes the parent of
+        /// category ordinal {@code i}.
+        /// </summary>
+        public abstract int[] Parents();
 
-	}
+        /// <summary>
+        /// Returns the children array, where {@code children[i]} denotes a child of
+        /// category ordinal {@code i}.
+        /// </summary>
+        public abstract int[] Children();
 
+        /// <summary>
+        /// Returns the siblings array, where {@code siblings[i]} denotes the sibling
+        /// of category ordinal {@code i}.
+        /// </summary>
+        public abstract int[] Siblings();
+    }
 }
\ No newline at end of file
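
The doc comments restored in the hunk above describe how the three parallel arrays are meant to be used: ordinal 0 is reserved for ROOT, children[i] points at one child of ordinal i, and siblings[i] points at the next sibling of i. As a rough illustration only (this is not code from the repository, and the -1 "no more ordinals" sentinel is an assumption), a depth-first walk over a ParallelTaxonomyArrays instance could look like:

    using System;

    // Hypothetical helper, not part of this commit: visits every ordinal
    // reachable from 'ordinal' in depth-first order, using the Children()
    // and Siblings() arrays described in the doc comments above.
    internal static class TaxonomyWalk
    {
        internal static void VisitDepthFirst(ParallelTaxonomyArrays arrays, int ordinal, Action<int> visit)
        {
            visit(ordinal);
            int child = arrays.Children()[ordinal];
            while (child != -1) // assumed "invalid ordinal" sentinel
            {
                VisitDepthFirst(arrays, child, visit);
                child = arrays.Siblings()[child];
            }
        }
    }

Starting from the root this would be called as VisitDepthFirst(arrays, 0, ord => Console.WriteLine(ord)); a breadth-first variant would instead queue children[0] and then follow the siblings chain level by level, as the comments note.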


[22/46] lucenenet git commit: Fixed string CompareTo to match Java

Posted by sy...@apache.org.
Fixed string CompareTo to match Java
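
Background for the diffs below: Java's String.compareTo orders strings by raw UTF-16 code units, while .NET's string.CompareTo is culture-sensitive by default, so the two can disagree on ordering. The change therefore switches these comparisons to a CompareToOrdinal extension from Lucene.Net.Support. That helper is not shown in this commit; a minimal sketch of what such an extension can look like (assumed shape, not the actual Lucene.Net.Support implementation) is:

    using System;

    public static class StringExtensions
    {
        // Compares two strings by their UTF-16 code units (ordinal order),
        // matching the ordering semantics of Java's String.compareTo.
        public static int CompareToOrdinal(this string str, string value)
        {
            return string.CompareOrdinal(str, value);
        }
    }

With that in place, a call such as a.Dim.CompareToOrdinal(b.Dim) sorts dimension names the same way the Java implementation does.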


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e76ee907
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e76ee907
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e76ee907

Branch: refs/heads/master
Commit: e76ee907c4302bc1765847892ca790153dd845fe
Parents: 8d23d13
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 14:17:24 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:23 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs | 5 +++--
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs                   | 2 +-
 src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs                     | 2 +-
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs                 | 5 +++--
 4 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
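
The motivation for this change: Java's String.compareTo orders strings by UTF-16 code units, while .NET's string.CompareTo is culture-sensitive by default, so the ported comparisons could order facet dimensions and path components differently than the Java implementation. CompareToOrdinal (from Lucene.Net.Support, per the usings added below) switches to ordinal ordering. A minimal illustration using only built-in .NET calls; the culture-sensitive result is environment-dependent and is printed only for contrast:

    using System;

    class OrdinalOrderingDemo
    {
        static void Main()
        {
            string a = "co-op";
            string b = "coop";
            // Culture-sensitive: result depends on the current culture's collation rules.
            Console.WriteLine(a.CompareTo(b));
            // Ordinal: compares UTF-16 code units, like Java's String.compareTo.
            // '-' (U+002D) < 'o' (U+006F), so this is negative.
            Console.WriteLine(string.CompareOrdinal(a, b));
        }
    }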


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e76ee907/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 057d274..95ee9d1 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -1,4 +1,5 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Collections.Generic;
 using System.Linq;
 
@@ -329,7 +330,7 @@ namespace Lucene.Net.Facet.SortedSet
                 }
                 else
                 {
-                    return a.Dim.CompareTo(b.Dim);
+                    return a.Dim.CompareToOrdinal(b.Dim);
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e76ee907/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index f168e87..ae177b6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -134,7 +134,7 @@ namespace Lucene.Net.Facet.Taxonomy
             int len = Length < other.Length ? Length : other.Length;
             for (int i = 0, j = 0; i < len; i++, j++)
             {
-                int cmp = Components[i].CompareTo(other.Components[j]);
+                int cmp = Components[i].CompareToOrdinal(other.Components[j]);
                 if (cmp < 0) // this is 'before'
                 {
                     return -1;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e76ee907/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index 26dc493..edc68c0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -114,7 +114,7 @@ namespace Lucene.Net.Facet.Taxonomy
             int len = Length < other.Length ? Length : other.Length;
             for (int i = 0, j = 0; i < len; i++, j++)
             {
-                int cmp = Components[i].CompareTo(other.Components[j]);
+                int cmp = Components[i].CompareToOrdinal(other.Components[j]);
                 if (cmp < 0)
                 {
                     return -1; // this is 'before'

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e76ee907/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index c0ae758..1a53994 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -1,4 +1,5 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Collections.Generic;
 using System.Linq;
 
@@ -47,7 +48,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 }
                 else
                 {
-                    return a.Dim.CompareTo(b.Dim);
+                    return a.Dim.CompareToOrdinal(b.Dim);
                 }
             }
         }


[08/46] lucenenet git commit: Fixed sorting bugs in Facet.FacetTestCase. Need to sort the list that was passed into the method, not create a new one.

Posted by sy...@apache.org.
Fixed sorting bugs in Facet.FacetTestCase. Need to sort the list that was passed into the method, not create a new one.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/44f7d9c8
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/44f7d9c8
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/44f7d9c8

Branch: refs/heads/master
Commit: 44f7d9c833ae4695bc643041d6e2c9083a721fb6
Parents: 4b87de0
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 20:09:24 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:49 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Tests.Facet/FacetTestCase.cs             | 12 ++++--------
 .../SortedSet/TestSortedSetDocValuesFacets.cs           |  6 +++---
 .../Taxonomy/TestTaxonomyFacetCounts.cs                 |  4 ++--
 .../Taxonomy/TestTaxonomyFacetSumValueSource.cs         |  4 ++--
 4 files changed, 11 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
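
The underlying bug is worth spelling out: assigning a newly created list to a parameter variable only rebinds the local reference, so the caller's list is never sorted; List<T>.Sort mutates the caller's list in place. A small self-contained sketch (names are illustrative, not taken from the test code):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    class SortInPlaceDemo
    {
        // Broken: sorts a copy, then rebinds the local parameter; the caller's list is untouched.
        static void SortBroken(IList<int> values)
        {
            var copy = values.ToArray();
            Array.Sort(copy);
            values = copy.ToList(); // only the local variable changes
        }

        // Fixed: mutate the very list the caller handed in.
        static void SortFixed(List<int> values)
        {
            values.Sort();
        }

        static void Main()
        {
            var data = new List<int> { 3, 1, 2 };
            SortBroken(data);
            Console.WriteLine(string.Join(",", data)); // 3,1,2 -- still unsorted
            SortFixed(data);
            Console.WriteLine(string.Join(",", data)); // 1,2,3
        }
    }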


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44f7d9c8/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
index 977d13b..0c71f32 100644
--- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
+++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
@@ -182,11 +182,9 @@ namespace Lucene.Net.Facet
             }
         }
 
-        protected internal virtual void SortLabelValues(IList<LabelAndValue> labelValues)
+        protected internal virtual void SortLabelValues(List<LabelAndValue> labelValues)
         {
-            var resArray = labelValues.ToArray();
-            Array.Sort(resArray,new ComparatorAnonymousInnerClassHelper2(this));
-            labelValues = resArray.ToList();
+            labelValues.Sort(new ComparatorAnonymousInnerClassHelper2(this));
         }
 
         private class ComparatorAnonymousInnerClassHelper2 : IComparer<LabelAndValue>
@@ -215,11 +213,9 @@ namespace Lucene.Net.Facet
             }
         }
 
-        protected internal virtual void SortFacetResults(IList<FacetResult> results)
+        protected internal virtual void SortFacetResults(List<FacetResult> results)
         {
-            var resArray = results.ToArray();
-            Array.Sort(resArray, new ComparatorAnonymousInnerClassHelper3(this));
-            results = resArray.ToList();
+            results.Sort(new ComparatorAnonymousInnerClassHelper3(this));
         }
 
         private class ComparatorAnonymousInnerClassHelper3 : IComparer<FacetResult>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44f7d9c8/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
index 75cf9d3..fe385d1 100644
--- a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
+++ b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
@@ -359,10 +359,10 @@ namespace Lucene.Net.Facet.SortedSet
                     }
                 }
 
-                IList<FacetResult> expected = new List<FacetResult>();
+                List<FacetResult> expected = new List<FacetResult>();
                 for (int i = 0; i < numDims; i++)
                 {
-                    IList<LabelAndValue> labelValues = new List<LabelAndValue>();
+                    List<LabelAndValue> labelValues = new List<LabelAndValue>();
                     int totCount = 0;
                     foreach (KeyValuePair<string, int?> ent in expectedCounts[i])
                     {
@@ -384,7 +384,7 @@ namespace Lucene.Net.Facet.SortedSet
                 // Messy: fixup ties
                 //sortTies(actual);
 
-                Assert.AreEqual(expected, actual);
+                CollectionAssert.AreEqual(expected, actual);
             }
 
             IOUtils.Close(w, searcher.IndexReader, indexDir, taxoDir);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44f7d9c8/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index 72390e2..e4ddc51 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -816,10 +816,10 @@ namespace Lucene.Net.Facet.Taxonomy
                     }
                 }
 
-                IList<FacetResult> expected = new List<FacetResult>();
+                List<FacetResult> expected = new List<FacetResult>();
                 for (int i = 0; i < numDims; i++)
                 {
-                    IList<LabelAndValue> labelValues = new List<LabelAndValue>();
+                    List<LabelAndValue> labelValues = new List<LabelAndValue>();
                     int totCount = 0;
                     foreach (KeyValuePair<string, int?> ent in expectedCounts[i])
                     {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44f7d9c8/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
index e05daa9..d8edbb5 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
@@ -569,10 +569,10 @@ namespace Lucene.Net.Facet.Taxonomy
                     }
                 }
 
-                IList<FacetResult> expected = new List<FacetResult>();
+                List<FacetResult> expected = new List<FacetResult>();
                 for (int i = 0; i < numDims; i++)
                 {
-                    IList<LabelAndValue> labelValues = new List<LabelAndValue>();
+                    List<LabelAndValue> labelValues = new List<LabelAndValue>();
                     float totValue = 0;
                     foreach (KeyValuePair<string, float?> ent in expectedValues[i])
                     {


[31/46] lucenenet git commit: Changed Facet.Taxonomy.WriterCache.LabelToOrdinal.NextOrdinal back to a method (GetNextOrdinal()) because it is not deterministic.

Posted by sy...@apache.org.
Changed Facet.Taxonomy.WriterCache.LabelToOrdinal.NextOrdinal back to a method (GetNextOrdinal()) because it is not deterministic.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/03d05b34
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/03d05b34
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/03d05b34

Branch: refs/heads/master
Commit: 03d05b34e29a988a7baa941c3eca01bfd988724e
Parents: c083a05
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:50:46 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:48 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
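
The point of the change: a property getter that increments a counter returns a different value on every read and has a side effect, which breaks the expectation that property access is repeatable and cheap (a debugger watch window can even silently consume ordinals). The method name makes the state change explicit at the call site. The sketch below contrasts the two shapes; it is illustrative only and not the actual LabelToOrdinal code.

    // Sketch: why an incrementing counter should be a method, not a property.
    class OrdinalAllocator
    {
        private int counter;

        // Reading this twice yields two different values and silently consumes ordinals,
        // e.g. when a debugger or serializer evaluates the getter.
        public int NextOrdinalProperty { get { return counter++; } }

        // The method form signals the side effect to callers.
        public int GetNextOrdinal()
        {
            return counter++;
        }
    }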


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/03d05b34/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
index 600ecbb..c9b36d6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
@@ -55,12 +55,9 @@
         /// Returns the next unassigned ordinal. The default behavior of this method
         /// is to simply increment a counter.
         /// </summary>
-        public virtual int NextOrdinal
+        public virtual int GetNextOrdinal()
         {
-            get
-            {
-                return this.counter++;
-            }
+            return this.counter++;
         }
 
         /// <summary>


[41/46] lucenenet git commit: Facet: Updated documentation

Posted by sy...@apache.org.
Facet: Updated documentation


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/44958102
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/44958102
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/44958102

Branch: refs/heads/master
Commit: 44958102853fca1051996bfe6cd42b7b1c194c56
Parents: 8cbc492
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 23:32:33 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:32:10 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/DrillDownQuery.cs          |  50 ++++---
 src/Lucene.Net.Facet/DrillSideways.cs           |  69 +++++----
 src/Lucene.Net.Facet/DrillSidewaysQuery.cs      |   3 +-
 src/Lucene.Net.Facet/DrillSidewaysScorer.cs     |  10 +-
 src/Lucene.Net.Facet/FacetField.cs              |  16 +-
 src/Lucene.Net.Facet/FacetResult.cs             |  19 ++-
 src/Lucene.Net.Facet/Facets.cs                  |  19 +--
 src/Lucene.Net.Facet/FacetsCollector.cs         |  60 ++++----
 src/Lucene.Net.Facet/FacetsConfig.cs            |  91 ++++++------
 src/Lucene.Net.Facet/LabelAndValue.cs           |  10 +-
 src/Lucene.Net.Facet/Lucene.Net.Facet.csproj    |   4 +
 src/Lucene.Net.Facet/MultiFacets.cs             |   8 +-
 .../RandomSamplingFacetsCollector.cs            |  47 +++---
 .../Range/DoubleRangeFacetCounts.cs             |  36 ++---
 src/Lucene.Net.Facet/Range/LongRangeCounter.cs  |  17 ++-
 .../Range/LongRangeFacetCounts.cs               |  32 ++--
 src/Lucene.Net.Facet/Range/Range.cs             |  41 ++---
 src/Lucene.Net.Facet/Range/RangeFacetCounts.cs  |   8 +-
 .../DefaultSortedSetDocValuesReaderState.cs     |  21 ++-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs  |  24 +--
 .../SortedSet/SortedSetDocValuesFacetField.cs   |   6 +-
 .../SortedSet/SortedSetDocValuesReaderState.cs  |  36 ++---
 .../Taxonomy/AssociationFacetField.cs           |  29 ++--
 .../Taxonomy/CachedOrdinalsReader.cs            |  21 +--
 src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs   |  37 +++--
 .../Directory/DirectoryTaxonomyReader.cs        |  51 +++----
 .../Directory/DirectoryTaxonomyWriter.cs        | 148 ++++++++++---------
 .../Taxonomy/Directory/TaxonomyIndexArrays.cs   |  25 ++--
 .../Taxonomy/DocValuesOrdinalsReader.cs         |  13 +-
 src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs     |  37 ++---
 .../Taxonomy/FastTaxonomyFacetCounts.cs         |  16 +-
 .../Taxonomy/FloatAssociationFacetField.cs      |  17 ++-
 .../Taxonomy/FloatTaxonomyFacets.cs             |   8 +-
 .../Taxonomy/IntAssociationFacetField.cs        |  18 +--
 .../Taxonomy/IntTaxonomyFacets.cs               |   8 +-
 src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs     |  41 +++--
 src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs |  12 +-
 .../Taxonomy/ParallelTaxonomyArrays.cs          |  35 ++---
 .../Taxonomy/PrintTaxonomyStats.cs              |  10 +-
 .../Taxonomy/SearcherTaxonomyManager.cs         |  29 ++--
 .../Taxonomy/TaxonomyFacetCounts.cs             |  12 +-
 .../TaxonomyFacetSumFloatAssociations.cs        |  12 +-
 .../Taxonomy/TaxonomyFacetSumIntAssociations.cs |  12 +-
 .../Taxonomy/TaxonomyFacetSumValueSource.cs     |  24 +--
 src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs |  22 +--
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  88 ++++++-----
 src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs |  58 ++++----
 .../Taxonomy/WriterCache/CategoryPathUtils.cs   |  12 +-
 .../Taxonomy/WriterCache/CharBlockArray.cs      |   2 +-
 .../WriterCache/Cl2oTaxonomyWriterCache.cs      |  10 +-
 .../Taxonomy/WriterCache/CollisionMap.cs        |  10 +-
 .../WriterCache/CompactLabelToOrdinal.cs        |  27 ++--
 .../Taxonomy/WriterCache/LabelToOrdinal.cs      |  11 +-
 .../WriterCache/LruTaxonomyWriterCache.cs       |   8 +-
 .../Taxonomy/WriterCache/NameIntCacheLRU.cs     |  17 ++-
 .../Taxonomy/WriterCache/TaxonomyWriterCache.cs |  38 ++---
 src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs     |  11 +-
 src/Lucene.Net.Facet/TopOrdAndIntQueue.cs       |  11 +-
 58 files changed, 844 insertions(+), 723 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/DrillDownQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index d6fa03b..67c36f2 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -36,13 +36,13 @@ namespace Lucene.Net.Facet
     using TermQuery = Lucene.Net.Search.TermQuery;
 
     /// <summary>
-    /// A <seealso cref="Query"/> for drill-down over facet categories. You
-    /// should call <seealso cref="#add(String, String...)"/> for every group of categories you
+    /// A <see cref="Query"/> for drill-down over facet categories. You
+    /// should call <see cref="Add(string, string[])"/> for every group of categories you
     /// want to drill-down over.
     /// <para>
-    /// <b>NOTE:</b> if you choose to create your own <seealso cref="Query"/> by calling
-    /// <seealso cref="#term"/>, it is recommended to wrap it with <seealso cref="ConstantScoreQuery"/>
-    /// and set the <seealso cref="ConstantScoreQuery#setBoost(float) boost"/> to {@code 0.0f},
+    /// <b>NOTE:</b> if you choose to create your own <see cref="Query"/> by calling
+    /// <see cref="Term"/>, it is recommended to wrap it with <see cref="ConstantScoreQuery"/>
+    /// and set the <see cref="ConstantScoreQuery.Boost">boost</see> to <c>0.0f</c>,
     /// so that it does not affect the scores of the documents.
     /// 
     /// @lucene.experimental
@@ -51,7 +51,8 @@ namespace Lucene.Net.Facet
     public sealed class DrillDownQuery : Query
     {
         /// <summary>
-        /// Creates a drill-down term. </summary>
+        /// Creates a drill-down term.
+        /// </summary>
         public static Term Term(string field, string dim, params string[] path)
         {
             return new Term(field, FacetsConfig.PathToString(dim, path));
@@ -62,7 +63,8 @@ namespace Lucene.Net.Facet
         private readonly IDictionary<string, int?> drillDownDims = new Dictionary<string, int?>();
 
         /// <summary>
-        /// Used by clone() </summary>
+        /// Used by <see cref="Clone"/>
+        /// </summary>
         internal DrillDownQuery(FacetsConfig config, BooleanQuery query, IDictionary<string, int?> drillDownDims)
         {
             this.query = (BooleanQuery)query.Clone();
@@ -71,7 +73,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Used by DrillSideways </summary>
+        /// Used by <see cref="DrillSideways"/>
+        /// </summary>
         internal DrillDownQuery(FacetsConfig config, Filter filter, DrillDownQuery other)
         {
             query = new BooleanQuery(true); // disable coord
@@ -92,7 +95,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Used by DrillSideways </summary>
+        /// Used by <see cref="DrillSideways"/>
+        /// </summary>
         internal DrillDownQuery(FacetsConfig config, Query baseQuery, IList<Query> clauses, IDictionary<string, int?> drillDownDims)
         {
             query = new BooleanQuery(true);
@@ -109,9 +113,9 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Creates a new {@code DrillDownQuery} without a base query, 
-        ///  to perform a pure browsing query (equivalent to using
-        ///  <seealso cref="MatchAllDocsQuery"/> as base). 
+        /// Creates a new <see cref="DrillDownQuery"/> without a base query, 
+        /// to perform a pure browsing query (equivalent to using
+        /// <see cref="MatchAllDocsQuery"/> as base). 
         /// </summary>
         public DrillDownQuery(FacetsConfig config)
             : this(config, null)
@@ -119,10 +123,10 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Creates a new {@code DrillDownQuery} over the given base query. Can be
-        ///  {@code null}, in which case the result <seealso cref="Query"/> from
-        ///  <seealso cref="#rewrite(IndexReader)"/> will be a pure browsing query, filtering on
-        ///  the added categories only. 
+        /// Creates a new <see cref="DrillDownQuery"/> over the given base query. Can be
+        /// <c>null</c>, in which case the result <see cref="Query"/> from
+        /// <see cref="Rewrite(IndexReader)"/> will be a pure browsing query, filtering on
+        /// the added categories only. 
         /// </summary>
         public DrillDownQuery(FacetsConfig config, Query baseQuery)
         {
@@ -136,7 +140,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Merges (ORs) a new path into an existing AND'd
-        ///  clause. 
+        /// clause. 
         /// </summary>
         private void Merge(string dim, string[] path)
         {
@@ -166,9 +170,9 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Adds one dimension of drill downs; if you pass the same
-        ///  dimension more than once it is OR'd with the previous
-        ///  cofnstraints on that dimension, and all dimensions are
-        ///  AND'd against each other and the base query. 
+        /// dimension more than once it is OR'd with the previous
+        /// constraints on that dimension, and all dimensions are
+        /// AND'd against each other and the base query. 
         /// </summary>
         public void Add(string dim, params string[] path)
         {
@@ -188,8 +192,8 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Expert: add a custom drill-down subQuery.  Use this
-        ///  when you have a separate way to drill-down on the
-        ///  dimension than the indexed facet ordinals. 
+        /// when you have a separate way to drill-down on the
+        /// dimension than the indexed facet ordinals. 
         /// </summary>
         public void Add(string dim, Query subQuery)
         {
@@ -212,7 +216,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Expert: add a custom drill-down Filter, e.g. when
-        ///  drilling down after range faceting. 
+        /// drilling down after range faceting. 
         /// </summary>
         public void Add(string dim, Filter subFilter)
         {
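
To make the intended usage above concrete, here is a minimal C# sketch based only on the constructors and Add overloads shown in this diff; the config, baseQuery, and searcher objects are assumed to exist and are not defined here:

    // Sketch: constrain a base query by two facet dimensions.
    var drillDown = new DrillDownQuery(config, baseQuery);
    drillDown.Add("Author", "Lisa");              // OR'd with any later "Author" paths
    drillDown.Add("Publish Date", "2010", "10");  // hierarchical path, AND'd with "Author"
    TopDocs hits = searcher.Search(drillDown, 10);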

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/DrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 457bed3..408117b 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -27,18 +27,18 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Computes drill down and sideways counts for the provided
-    /// <seealso cref="DrillDownQuery"/>.  Drill sideways counts include
+    /// <see cref="DrillDownQuery"/>.  Drill sideways counts include
     /// alternative values/aggregates for the drill-down
     /// dimensions so that a dimension does not disappear after
     /// the user drills down into it.
     /// 
     /// <para> Use one of the static search
     /// methods to do the search, and then get the hits and facet
-    /// results from the returned <seealso cref="DrillSidewaysResult"/>.
+    /// results from the returned <see cref="DrillSidewaysResult"/>.
     /// 
     /// </para>
-    /// <para><b>NOTE</b>: this allocates one {@link
-    /// FacetsCollector} for each drill-down, plus one.  If your
+    /// <para><b>NOTE</b>: this allocates one <see cref="FacetsCollector"/>
+    /// for each drill-down, plus one.  If your
     /// index has high number of facet labels then this will
     /// multiply your memory usage.
     /// 
@@ -48,33 +48,37 @@ namespace Lucene.Net.Facet
     public class DrillSideways
     {
         /// <summary>
-        /// <seealso cref="IndexSearcher"/> passed to constructor. </summary>
+        /// <see cref="IndexSearcher"/> passed to constructor.
+        /// </summary>
         protected internal readonly IndexSearcher searcher;
 
         /// <summary>
-        /// <seealso cref="TaxonomyReader"/> passed to constructor. </summary>
+        /// <see cref="TaxonomyReader"/> passed to constructor.
+        /// </summary>
         protected internal readonly TaxonomyReader taxoReader;
 
         /// <summary>
-        /// <seealso cref="SortedSetDocValuesReaderState"/> passed to
-        ///  constructor; can be null. 
+        /// <see cref="SortedSetDocValuesReaderState"/> passed to
+        /// constructor; can be <c>null</c>. 
         /// </summary>
         protected internal readonly SortedSetDocValuesReaderState state;
 
         /// <summary>
-        /// <seealso cref="FacetsConfig"/> passed to constructor. </summary>
+        /// <see cref="FacetsConfig"/> passed to constructor.
+        /// </summary>
         protected internal readonly FacetsConfig config;
 
         /// <summary>
-        /// Create a new {@code DrillSideways} instance. </summary>
+        /// Create a new <see cref="DrillSideways"/> instance.
+        /// </summary>
         public DrillSideways(IndexSearcher searcher, FacetsConfig config, TaxonomyReader taxoReader)
             : this(searcher, config, taxoReader, null)
         {
         }
 
         /// <summary>
-        /// Create a new {@code DrillSideways} instance, assuming the categories were
-        ///  indexed with <seealso cref="SortedSetDocValuesFacetField"/>. 
+        /// Create a new <see cref="DrillSideways"/> instance, assuming the categories were
+        /// indexed with <see cref="SortedSetDocValuesFacetField"/>. 
         /// </summary>
         public DrillSideways(IndexSearcher searcher, FacetsConfig config, SortedSetDocValuesReaderState state)
             : this(searcher, config, null, state)
@@ -82,10 +86,9 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Create a new {@code DrillSideways} instance, where some
-        ///  dimensions were indexed with {@link
-        ///  SortedSetDocValuesFacetField} and others were indexed
-        ///  with <seealso cref="FacetField"/>. 
+        /// Create a new <see cref="DrillSideways"/> instance, where some
+        /// dimensions were indexed with <see cref="SortedSetDocValuesFacetField"/>
+        /// and others were indexed with <see cref="FacetField"/>. 
         /// </summary>
         public DrillSideways(IndexSearcher searcher, FacetsConfig config, TaxonomyReader taxoReader, SortedSetDocValuesReaderState state)
         {
@@ -97,7 +100,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Subclass can override to customize per-dim Facets
-        ///  impl. 
+        /// impl. 
         /// </summary>
         protected virtual Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, string[] drillSidewaysDims)
         {
@@ -139,7 +142,7 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Search, collecting hits with a <seealso cref="Collector"/>, and
+        /// Search, collecting hits with a <see cref="Collector"/>, and
         /// computing drill down and sideways counts.
         /// </summary>
         public virtual DrillSidewaysResult Search(DrillDownQuery query, Collector hitCollector)
@@ -194,7 +197,7 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Search, sorting by <seealso cref="Sort"/>, and computing
+        /// Search, sorting by <see cref="Sort"/>, and computing
         /// drill down and sideways counts.
         /// </summary>
         public virtual DrillSidewaysResult Search(DrillDownQuery query, Filter filter, FieldDoc after, int topN, Sort sort, bool doDocScores, bool doMaxScore)
@@ -249,15 +252,14 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Override this and return true if your collector
-        ///  (e.g., {@code ToParentBlockJoinCollector}) expects all
-        ///  sub-scorers to be positioned on the document being
-        ///  collected.  This will cause some performance loss;
-        ///  default is false.  Note that if you return true from
-        ///  this method (in a subclass) be sure your collector
-        ///  also returns false from {@link
-        ///  Collector#acceptsDocsOutOfOrder}: this will trick
-        ///  {@code BooleanQuery} into also scoring all subDocs at
-        ///  once. 
+        /// (e.g., <see cref="Join.ToParentBlockJoinCollector"/>) expects all
+        /// sub-scorers to be positioned on the document being
+        /// collected.  This will cause some performance loss;
+        /// default is <c>false</c>.  Note that if you return true from
+        /// this method (in a subclass) be sure your collector
+        /// also returns <c>false</c> from <see cref="Collector.AcceptsDocsOutOfOrder"/>: 
+        /// this will trick <see cref="BooleanQuery"/> into also scoring all subDocs at
+        /// once. 
         /// </summary>
         protected virtual bool ScoreSubDocsAtOnce()
         {
@@ -266,20 +268,23 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Result of a drill sideways search, including the
-        ///  <seealso crTopDocsetss"/> and <seealso cref="Lucene"/>. 
+        /// <see cref="Facet.Facets"/> and <see cref="TopDocs"/>. 
         /// </summary>
         public class DrillSidewaysResult
         {
             /// <summary>
-            /// Combined drill down & sideways results. </summary>
+            /// Combined drill down & sideways results.
+            /// </summary>
             public Facets Facets { get; private set; }
 
             /// <summary>
-            /// Hits. </summary>
+            /// Hits.
+            /// </summary>
             public TopDocs Hits { get; private set; }
 
             /// <summary>
-            /// Sole constructor. </summary>
+            /// Sole constructor.
+            /// </summary>
             public DrillSidewaysResult(Facets facets, TopDocs hits)
             {
                 this.Facets = facets;
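
A minimal usage sketch for the class documented above, using only the constructor and the Search overload visible in this diff; drillDown, searcher, config, and taxoReader are assumed to exist, and passing null for filter/after/sort follows the Java original's behavior (an assumption for the port):

    var sideways = new DrillSideways(searcher, config, taxoReader);
    DrillSideways.DrillSidewaysResult result =
        sideways.Search(drillDown, null, null, 10, null, false, false);
    Facets facets = result.Facets; // per-dimension counts with that dimension's drill-down relaxed
    TopDocs hits = result.Hits;    // the drill-down hits themselves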

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
index 9b25dac..38fac96 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
@@ -36,9 +36,8 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Only purpose is to punch through and return a
-    ///  DrillSidewaysScorer 
+    /// <see cref="DrillSidewaysScorer"/> 
     /// </summary>
-
     internal class DrillSidewaysQuery : Query
     {
         internal readonly Query baseQuery;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
index 18a0e06..0dcc285 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
@@ -169,10 +169,10 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Used when base query is highly constraining vs the
-        ///  drilldowns, or when the docs must be scored at once
-        ///  (i.e., like BooleanScorer2, not BooleanScorer).  In
-        ///  this case we just .next() on base and .advance() on
-        ///  the dim filters. 
+        /// drilldowns, or when the docs must be scored at once
+        /// (i.e., like <see cref="Search.BooleanScorer2"/>, not <see cref="Search.BooleanScorer"/>).  In
+        /// this case we just .Next() on base and .Advance() on
+        /// the dim filters. 
         /// </summary>
         private void DoQueryFirstScoring(Collector collector, DocIdSetIterator[] disis, 
             Collector[] sidewaysCollectors, Bits[] bits, Collector[] bitsSidewaysCollectors)
@@ -263,7 +263,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Used when drill downs are highly constraining vs
-        ///  baseQuery. 
+        /// baseQuery. 
         /// </summary>
         private void DoDrillDownAdvanceScoring(Collector collector, DocIdSetIterator[] disis, Collector[] sidewaysCollectors)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/FacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetField.cs b/src/Lucene.Net.Facet/FacetField.cs
index 794ec31..9934afb 100644
--- a/src/Lucene.Net.Facet/FacetField.cs
+++ b/src/Lucene.Net.Facet/FacetField.cs
@@ -24,11 +24,11 @@ namespace Lucene.Net.Facet
     using FieldType = Lucene.Net.Documents.FieldType;
 
     /// <summary>
-    /// Add an instance of this to your <seealso cref="Document"/> for every facet label.
+    /// Add an instance of this to your <see cref="Document"/> for every facet label.
     /// 
     /// <para>
-    /// <b>NOTE:</b> you must call <seealso cref="FacetsConfig#build(Document)"/> before
-    /// you add the document to IndexWriter.
+    /// <b>NOTE:</b> you must call <see cref="FacetsConfig.Build(Document)"/> before
+    /// you add the document to <see cref="Index.IndexWriter"/>.
     /// </para>
     /// </summary>
     public class FacetField : Field
@@ -41,16 +41,18 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Dimension for this field. </summary>
+        /// Dimension for this field.
+        /// </summary>
         public string Dim { get; private set; }
 
         /// <summary>
-        /// Path for this field. </summary>
+        /// Path for this field.
+        /// </summary>
         public string[] Path { get; private set; }
 
         /// <summary>
-        /// Creates the this from {@code dim} and
-        ///  {@code path}. 
+        /// Creates this from <paramref name="dim"/> and
+        /// <paramref name="path"/>. 
         /// </summary>
         public FacetField(string dim, params string[] path)
             : base("dummy", TYPE)
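
A short indexing sketch for the NOTE above; writer (an IndexWriter), taxoWriter (a taxonomy writer), and config (a FacetsConfig) are assumed to exist, and the Build overload taking a taxonomy writer is assumed to mirror the Lucene 4.8 Java API (only the taxonomy-free Build(Document) appears elsewhere in this commit):

    var doc = new Document();
    doc.Add(new FacetField("Author", "Lisa"));
    doc.Add(new FacetField("Publish Date", "2010", "10", "15"));
    // Build() rewrites the FacetFields into ordinary indexable fields;
    // index the returned document, not `doc` itself.
    writer.AddDocument(config.Build(taxoWriter, doc));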

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/FacetResult.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetResult.cs b/src/Lucene.Net.Facet/FacetResult.cs
index 7dc73b9..26040e8 100644
--- a/src/Lucene.Net.Facet/FacetResult.cs
+++ b/src/Lucene.Net.Facet/FacetResult.cs
@@ -23,30 +23,35 @@ namespace Lucene.Net.Facet
      */
 
     /// <summary>
-    /// Counts or aggregates for a single dimension. </summary>
+    /// Counts or aggregates for a single dimension.
+    /// </summary>
     public sealed class FacetResult
     {
         /// <summary>
-        /// Dimension that was requested. </summary>
+        /// Dimension that was requested.
+        /// </summary>
         public string Dim { get; private set; }
 
         /// <summary>
-        /// Path whose children were requested. </summary>
+        /// Path whose children were requested.
+        /// </summary>
         public string[] Path { get; private set; }
 
         /// <summary>
         /// Total value for this path (sum of all child counts, or
-        ///  sum of all child values), even those not included in
-        ///  the topN. 
+        /// sum of all child values), even those not included in
+        /// the topN. 
         /// </summary>
         public float Value { get; private set; }
 
         /// <summary>
-        /// How many child labels were encountered. </summary>
+        /// How many child labels were encountered.
+        /// </summary>
         public int ChildCount { get; private set; }
 
         /// <summary>
-        /// Child counts. </summary>
+        /// Child counts.
+        /// </summary>
         public LabelAndValue[] LabelValues { get; private set; }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Facets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Facets.cs b/src/Lucene.Net.Facet/Facets.cs
index ce17c15..669124f 100644
--- a/src/Lucene.Net.Facet/Facets.cs
+++ b/src/Lucene.Net.Facet/Facets.cs
@@ -27,31 +27,32 @@ namespace Lucene.Net.Facet
     public abstract class Facets
     {
         /// <summary>
-        /// Default constructor. </summary>
+        /// Default constructor.
+        /// </summary>
         public Facets()
         {
         }
 
         /// <summary>
         /// Returns the topN child labels under the specified
-        ///  path.  Returns null if the specified path doesn't
-        ///  exist or if this dimension was never seen. 
+        /// path.  Returns null if the specified path doesn't
+        /// exist or if this dimension was never seen. 
         /// </summary>
         public abstract FacetResult GetTopChildren(int topN, string dim, params string[] path);
 
         /// <summary>
         /// Return the count or value
-        ///  for a specific path.  Returns -1 if
-        ///  this path doesn't exist, else the count. 
+        /// for a specific path.  Returns -1 if
+        /// this path doesn't exist, else the count. 
         /// </summary>
         public abstract float GetSpecificValue(string dim, params string[] path);
 
         /// <summary>
         /// Returns topN labels for any dimension that had hits,
-        ///  sorted by the number of hits that dimension matched;
-        ///  this is used for "sparse" faceting, where many
-        ///  different dimensions were indexed, for example
-        ///  depending on the type of document. 
+        /// sorted by the number of hits that dimension matched;
+        /// this is used for "sparse" faceting, where many
+        /// different dimensions were indexed, for example
+        /// depending on the type of document. 
         /// </summary>
         public abstract List<FacetResult> GetAllDims(int topN);
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/FacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index 0b225d0..c494a7b 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -25,11 +25,11 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Collects hits for subsequent faceting.  Once you've run
-    ///  a search and collect hits into this, instantiate one of
-    ///  the <seealso cref="Collector"/> subclasses to do the facet
-    ///  counting.  Use the {@code search} utility methods to
-    ///  perform an "ordinary" search but also collect into a
-    ///  <seealso cref="Facets"/>. 
+    /// a search and collect hits into this, instantiate one of
+    /// the <see cref="Collector"/> subclasses to do the facet
+    /// counting.  Use the <see cref="Search"/> utility methods to
+    /// perform an "ordinary" search but also collect into a
+    /// <see cref="Facets"/>. 
     /// </summary>
     public class FacetsCollector : Collector
     {
@@ -43,29 +43,32 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Used during collection to record matching docs and then return a
-        /// <seealso cref="DocIdSet"/> that contains them.
+        /// <see cref="DocIdSet"/> that contains them.
         /// </summary>
         protected internal abstract class Docs
         {
 
             /// <summary>
-            /// Solr constructor. </summary>
+            /// Sole constructor.
+            /// </summary>
             public Docs()
             {
             }
 
             /// <summary>
-            /// Record the given document. </summary>
+            /// Record the given document.
+            /// </summary>
             public abstract void AddDoc(int docId);
 
             /// <summary>
-            /// Return the <seealso cref="DocIdSet"/> which contains all the recorded docs. </summary>
+            /// Return the <see cref="DocIdSet"/> which contains all the recorded docs.
+            /// </summary>
             public abstract DocIdSet DocIdSet { get; }
         }
 
         /// <summary>
-        /// Holds the documents that were matched in the <seealso cref="AtomicReaderContext"/>.
-        /// If scores were required, then {@code scores} is not null.
+        /// Holds the documents that were matched in the <see cref="AtomicReaderContext"/>.
+        /// If scores were required, then <see cref="Scores"/> is not <c>null</c>.
         /// </summary>
         public sealed class MatchingDocs
         {
@@ -87,7 +90,8 @@ namespace Lucene.Net.Facet
             public int TotalHits { get; private set; }
 
             /// <summary>
-            /// Sole constructor. </summary>
+            /// Sole constructor.
+            /// </summary>
             public MatchingDocs(AtomicReaderContext context, DocIdSet bits, int totalHits, float[] scores)
             {
                 this.Context = context;
@@ -98,15 +102,16 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Default constructor </summary>
+        /// Default constructor
+        /// </summary>
         public FacetsCollector()
             : this(false)
         {
         }
 
         /// <summary>
-        /// Create this; if {@code keepScores} is true then a
-        ///  float[] is allocated to hold score of all hits. 
+        /// Create this; if <paramref name="keepScores"/> is <c>true</c> then a
+        /// <see cref="float[]"/> is allocated to hold score of all hits. 
         /// </summary>
         public FacetsCollector(bool keepScores)
         {
@@ -114,9 +119,9 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Creates a <seealso cref="Docs"/> to record hits. The default uses <seealso cref="FixedBitSet"/>
+        /// Creates a <see cref="Docs"/> to record hits. The default uses <see cref="FixedBitSet"/>
         /// to record hits and you can override to e.g. record the docs in your own
-        /// <seealso cref="DocIdSet"/>.
+        /// <see cref="DocIdSet"/>.
         /// </summary>
         protected virtual Docs CreateDocs(int maxDoc)
         {
@@ -153,7 +158,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// True if scores were saved. </summary>
+        /// True if scores were saved.
+        /// </summary>
         public bool KeepScores
         {
             get
@@ -163,7 +169,7 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Returns the documents matched by the query, one <seealso cref="GetMatchingDocs"/> per
+        /// Returns the documents matched by the query, one <see cref="GetMatchingDocs"/> per
         /// visited segment.
         /// </summary>
         public virtual List<MatchingDocs> GetMatchingDocs()
@@ -231,7 +237,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopDocs Search(IndexSearcher searcher, Query q, int n, Collector fc)
         {
@@ -240,7 +246,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopDocs Search(IndexSearcher searcher, Query q, Filter filter, int n, Collector fc)
         {
@@ -249,7 +255,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopFieldDocs Search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, Collector fc)
         {
@@ -262,7 +268,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopFieldDocs Search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, bool doDocScores, bool doMaxScore, Collector fc)
         {
@@ -275,7 +281,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public virtual TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, int n, Collector fc)
         {
@@ -284,7 +290,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Collector fc)
         {
@@ -293,7 +299,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, Collector fc)
         {
@@ -306,7 +312,7 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Utility method, to search and also collect all hits
-        ///  into the provided <seealso cref="Collector"/>. 
+        /// into the provided <see cref="Collector"/>. 
         /// </summary>
         public static TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, bool doDocScores, bool doMaxScore, Collector fc)
         {
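
A minimal search-plus-facets sketch for the collector documented above, using the static Search helper from this diff; searcher, taxoReader, and config are assumed to exist, and FastTaxonomyFacetCounts is named on the assumption that the port keeps the Lucene 4.8 class (it is listed in this commit's file summary, but its constructor is not shown here):

    var fc = new FacetsCollector();
    TopDocs hits = FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc);
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
    FacetResult topAuthors = facets.GetTopChildren(10, "Author");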

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/FacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 48e97a1..0df6e07 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -44,15 +44,16 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Records per-dimension configuration.  By default a
-    ///  dimension is flat, single valued and does
-    ///  not require count for the dimension; use
-    ///  the setters in this class to change these settings for
-    ///  each dim.
+    /// dimension is flat, single valued and does
+    /// not require count for the dimension; use
+    /// the setters in this class to change these settings for
+    /// each dim.
     /// 
-    ///  <para><b>NOTE</b>: this configuration is not saved into the
-    ///  index, but it's vital, and up to the application to
-    ///  ensure, that at search time the provided {@code
-    ///  FacetsConfig} matches what was used during indexing.
+    /// <para>
+    /// <b>NOTE</b>: this configuration is not saved into the
+    /// index, but it's vital, and up to the application to
+    /// ensure, that at search time the provided <see cref="FacetsConfig"/>
+    /// matches what was used during indexing.
     /// 
     ///  @lucene.experimental 
     /// </para>
@@ -61,7 +62,7 @@ namespace Lucene.Net.Facet
     {
         /// <summary>
         /// Which Lucene field holds the drill-downs and ords (as
-        ///  doc values). 
+        /// doc values). 
         /// </summary>
         public const string DEFAULT_INDEX_FIELD_NAME = "$facets";
 
@@ -99,7 +100,8 @@ namespace Lucene.Net.Facet
             public string IndexFieldName { get; set; }
 
             /// <summary>
-            /// Default constructor. </summary>
+            /// Default constructor.
+            /// </summary>
             public DimConfig()
             {
                 IndexFieldName = DEFAULT_INDEX_FIELD_NAME;
@@ -111,19 +113,21 @@ namespace Lucene.Net.Facet
         public static readonly DimConfig DEFAULT_DIM_CONFIG = new DimConfig();
 
         /// <summary>
-        /// Default constructor. </summary>
+        /// Default constructor.
+        /// </summary>
         public FacetsConfig()
         {
         }
 
         /// <summary>
         /// Get the default configuration for new dimensions.  Useful when
-        ///  the dimension is not known beforehand and may need different 
-        ///  global default settings, like {@code multivalue =
-        ///  true}.
+        /// the dimension is not known beforehand and may need different 
+        /// global default settings, like <c>multivalue = true</c>.
         /// </summary>
-        ///  <returns> The default configuration to be used for dimensions that 
-        ///  are not yet set in the <seealso cref="FacetsConfig"/>  </returns>
+        /// <returns>
+        /// The default configuration to be used for dimensions that 
+        /// are not yet set in the <see cref="FacetsConfig"/>
+        /// </returns>
         protected virtual DimConfig DefaultDimConfig
         {
             get
@@ -133,7 +137,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Get the current configuration for a dimension. </summary>
+        /// Get the current configuration for a dimension.
+        /// </summary>
         public virtual DimConfig GetDimConfig(string dimName)
         {
             lock (this)
@@ -148,8 +153,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Pass {@code true} if this dimension is hierarchical
-        ///  (has depth > 1 paths). 
+        /// Pass <c>true</c> if this dimension is hierarchical
+        /// (has depth > 1 paths). 
         /// </summary>
         public virtual void SetHierarchical(string dimName, bool v)
         {
@@ -168,8 +173,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Pass {@code true} if this dimension may have more than
-        ///  one value per document. 
+        /// Pass <c>true</c> if this dimension may have more than
+        /// one value per document. 
         /// </summary>
         public virtual void SetMultiValued(string dimName, bool v)
         {
@@ -188,9 +193,9 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Pass {@code true} if at search time you require
-        ///  accurate counts of the dimension, i.e. how many
-        ///  hits have this dimension. 
+        /// Pass <c>true</c> if at search time you require
+        /// accurate counts of the dimension, i.e. how many
+        /// hits have this dimension. 
         /// </summary>
         public virtual void SetRequireDimCount(string dimName, bool v)
         {
@@ -210,8 +215,8 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Specify which index field name should hold the
-        ///  ordinals for this dimension; this is only used by the
-        ///  taxonomy based facet methods. 
+        /// ordinals for this dimension; this is only used by the
+        /// taxonomy based facet methods. 
         /// </summary>
         public virtual void SetIndexFieldName(string dimName, string indexFieldName)
         {
@@ -230,7 +235,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Returns map of field name to <seealso cref="DimConfig"/>. </summary>
+        /// Returns map of field name to <see cref="DimConfig"/>.
+        /// </summary>
         public virtual IDictionary<string, DimConfig> DimConfigs
         {
             get
@@ -249,12 +255,12 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Translates any added <seealso cref="FacetField"/>s into normal fields for indexing;
-        /// only use this version if you did not add any taxonomy-based fields (
-        /// <seealso cref="FacetField"/> or <seealso cref="AssociationFacetField"/>).
+        /// Translates any added <see cref="FacetField"/>s into normal fields for indexing;
+        /// only use this version if you did not add any taxonomy-based fields 
+        /// (<see cref="FacetField"/> or <see cref="AssociationFacetField"/>).
         /// 
         /// <para>
-        /// <b>NOTE:</b> you should add the returned document to IndexWriter, not the
+        /// <b>NOTE:</b> you should add the returned document to <see cref="Index.IndexWriter"/>, not the
         /// input one!
         /// </para>
         /// </summary>
@@ -264,10 +270,10 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Translates any added <seealso cref="FacetField"/>s into normal fields for indexing.
+        /// Translates any added <see cref="FacetField"/>s into normal fields for indexing.
         /// 
         /// <para>
-        /// <b>NOTE:</b> you should add the returned document to IndexWriter, not the
+        /// <b>NOTE:</b> you should add the returned document to <see cref="Index.IndexWriter"/>, not the
         /// input one!
         /// </para>
         /// </summary>
@@ -527,8 +533,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Encodes ordinals into a BytesRef; expert: subclass can
-        ///  override this to change encoding. 
+        /// Encodes ordinals into a <see cref="BytesRef"/>; expert: subclass can
+        /// override this to change encoding. 
         /// </summary>
         protected virtual BytesRef DedupAndEncode(IntsRef ordinals)
         {
@@ -608,7 +614,8 @@ namespace Lucene.Net.Facet
         private const char ESCAPE_CHAR = '\u001E';
 
         /// <summary>
-        /// Turns a dim + path into an encoded string. </summary>
+        /// Turns a dim + path into an encoded string.
+        /// </summary>
         public static string PathToString(string dim, string[] path)
         {
             string[] fullPath = new string[1 + path.Length];
@@ -618,15 +625,16 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Turns a dim + path into an encoded string. </summary>
+        /// Turns a dim + path into an encoded string.
+        /// </summary>
         public static string PathToString(string[] path)
         {
             return PathToString(path, path.Length);
         }
 
         /// <summary>
-        /// Turns the first {@code length} elements of {@code
-        /// path} into an encoded string. 
+        /// Turns the first <paramref name="length"/> elements of <paramref name="path"/>
+        /// into an encoded string. 
         /// </summary>
         public static string PathToString(string[] path, int length)
         {
@@ -661,9 +669,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Turns an encoded string (from a previous call to {@link
-        ///  #pathToString}) back into the original {@code
-        ///  String[]}. 
+        /// Turns an encoded string (from a previous call to <see cref="PathToString"/>) 
+        /// back into the original <see cref="string[]"/>. 
         /// </summary>
         public static string[] StringToPath(string s)
         {
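
A short configuration sketch using only the setters shown in this diff; remember (per the class note above) that the same FacetsConfig must be supplied again at search time, because none of this is persisted in the index:

    var config = new FacetsConfig();
    config.SetHierarchical("Publish Date", true); // paths deeper than one level
    config.SetMultiValued("Author", true);        // a document may carry several authors
    config.SetRequireDimCount("Author", true);    // keep an accurate per-dimension hit count
    config.SetIndexFieldName("Author", "$author_facets"); // illustrative field name for this dim's ordinals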

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/LabelAndValue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/LabelAndValue.cs b/src/Lucene.Net.Facet/LabelAndValue.cs
index bf6271c..b5719bf 100644
--- a/src/Lucene.Net.Facet/LabelAndValue.cs
+++ b/src/Lucene.Net.Facet/LabelAndValue.cs
@@ -22,7 +22,7 @@ namespace Lucene.Net.Facet
 
     /// <summary>
     /// Single label and its value, usually contained in a
-    ///  <seealso cref="FacetResult"/>. 
+    /// <see cref="FacetResult"/>. 
     /// </summary>
     public sealed class LabelAndValue
     {
@@ -69,14 +69,14 @@ namespace Lucene.Net.Facet
             return Label + " (" + valueString + ")";
         }
 
-        public override bool Equals(object _other)
+        public override bool Equals(object other)
         {
-            if ((_other is LabelAndValue) == false)
+            if ((other is LabelAndValue) == false)
             {
                 return false;
             }
-            LabelAndValue other = (LabelAndValue)_other;
-            return Label.Equals(other.Label) && Value.Equals(other.Value);
+            LabelAndValue _other = (LabelAndValue)other;
+            return Label.Equals(_other.Label) && Value.Equals(_other.Value);
         }
 
         public override int GetHashCode()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Lucene.Net.Facet.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Lucene.Net.Facet.csproj b/src/Lucene.Net.Facet/Lucene.Net.Facet.csproj
index 1b33158..0270457 100644
--- a/src/Lucene.Net.Facet/Lucene.Net.Facet.csproj
+++ b/src/Lucene.Net.Facet/Lucene.Net.Facet.csproj
@@ -107,6 +107,10 @@
       <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
       <Name>Lucene.Net</Name>
     </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.Join\Lucene.Net.Join.csproj">
+      <Project>{e8a339c7-fcf6-4a72-8586-56d8961d7b99}</Project>
+      <Name>Lucene.Net.Join</Name>
+    </ProjectReference>
     <ProjectReference Include="..\Lucene.Net.Queries\Lucene.Net.Queries.csproj">
       <Project>{69d7956c-c2cc-4708-b399-a188fec384c4}</Project>
       <Name>Lucene.Net.Queries</Name>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/MultiFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/MultiFacets.cs b/src/Lucene.Net.Facet/MultiFacets.cs
index 79bb077..340b706 100644
--- a/src/Lucene.Net.Facet/MultiFacets.cs
+++ b/src/Lucene.Net.Facet/MultiFacets.cs
@@ -20,8 +20,8 @@ namespace Lucene.Net.Facet
      */
 
     /// <summary>
-    /// Maps specified dims to provided Facets impls; else, uses
-    ///  the default Facets impl. 
+    /// Maps specified dims to provided <see cref="Facets"/> impls; else, uses
+    /// the default <see cref="Facets"/> impl. 
     /// </summary>
     public class MultiFacets : Facets
     {
@@ -29,8 +29,8 @@ namespace Lucene.Net.Facet
         private readonly Facets defaultFacets;
 
         /// <summary>
-        /// Create this, with the specified default <seealso cref="Facets"/>
-        ///  for fields not included in {@code dimToFacets}. 
+        /// Create this, with the specified default <see cref="Facets"/>
+        /// for fields not included in <paramref name="dimToFacets"/>. 
         /// </summary>
         public MultiFacets(IDictionary<string, Facets> dimToFacets, Facets defaultFacets = null)
         {
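
A short sketch of wiring MultiFacets, assuming hits were already collected into a
FacetsCollector `fc` and a taxonomy reader/config exist (dimension names are
illustrative):

    var byDim = new Dictionary<string, Facets>
    {
        { "price", new DoubleRangeFacetCounts("price", fc,
            new DoubleRange("cheap", 0.0, true, 10.0, false),
            new DoubleRange("expensive", 10.0, true, double.MaxValue, true)) }
    };
    Facets facets = new MultiFacets(byDim, new FastTaxonomyFacetCounts(taxoReader, config, fc));
    FacetResult prices = facets.GetTopChildren(10, "price");   // routed to the range impl
    FacetResult authors = facets.GetTopChildren(10, "Author"); // handled by the default impl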

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
index 491cf5e..436f9b0 100644
--- a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
@@ -32,14 +32,14 @@ namespace Lucene.Net.Facet
     /// <summary>
     /// Collects hits for subsequent faceting, using sampling if needed. Once you've
     /// run a search and collect hits into this, instantiate one of the
-    /// <seealso cref="Facets"/> subclasses to do the facet counting. Note that this collector
+    /// <see cref="Facets"/> subclasses to do the facet counting. Note that this collector
     /// does not collect the scores of matching docs (i.e.
-    /// <seealso cref="FacetsCollector.MatchingDocs#scores"/>) is {@code null}.
+    /// <see cref="FacetsCollector.MatchingDocs.Scores"/>) is <c>null</c>.
     /// <para>
     /// If you require the original set of hits, you can call
-    /// <seealso cref="#getOriginalMatchingDocs()"/>. Also, since the counts of the top-facets
+    /// <see cref="GetOriginalMatchingDocs()"/>. Also, since the counts of the top-facets
     /// is based on the sampled set, you can amortize the counts by calling
-    /// <seealso cref="#amortizeFacetCounts"/>.
+    /// <see cref="AmortizeFacetCounts"/>.
     /// </para>
     /// </summary>
     public class RandomSamplingFacetsCollector : FacetsCollector
@@ -56,14 +56,16 @@ namespace Lucene.Net.Facet
             internal long x;
 
             /// <summary>
-            /// Creates a xorshift random generator using the provided seed </summary>
+            /// Creates a xorshift random generator using the provided seed
+            /// </summary>
             public XORShift64Random(long seed)
             {
                 x = seed == 0 ? 0xdeadbeef : seed;
             }
 
             /// <summary>
-            /// Get the next random long value </summary>
+            /// Get the next random long value
+            /// </summary>
             public virtual long RandomLong()
             {
                 x ^= (x << 21);
@@ -73,7 +75,8 @@ namespace Lucene.Net.Facet
             }
 
             /// <summary>
-            /// Get the next random int, between 0 (inclusive) and n (exclusive) </summary>
+            /// Get the next random int, between 0 (inclusive) and <paramref name="n"/> (exclusive)
+            /// </summary>
             public virtual int NextInt(int n)
             {
                 int res = (int)(RandomLong() % n);
@@ -95,7 +98,7 @@ namespace Lucene.Net.Facet
         /// <summary>
         /// Constructor with the given sample size and default seed.
         /// </summary>
-        /// <seealso cref= #RandomSamplingFacetsCollector(int, long) </seealso>
+        /// <seealso cref="RandomSamplingFacetsCollector(int, long)"/>
         public RandomSamplingFacetsCollector(int sampleSize)
             : this(sampleSize, 0)
         {
@@ -111,7 +114,7 @@ namespace Lucene.Net.Facet
         ///          samplingRatio of 0.01. If the number of hits is lower, no sampling
         ///          is done at all </param>
         /// <param name="seed">
-        ///          The random seed. If {@code 0} then a seed will be chosen for you. </param>
+        ///          The random seed. If <c>0</c> then a seed will be chosen for you. </param>
         public RandomSamplingFacetsCollector(int sampleSize, long seed)
             : base(false)
         {
@@ -122,15 +125,15 @@ namespace Lucene.Net.Facet
 
         /// <summary>
         /// Returns the sampled list of the matching documents. Note that a
-        /// <seealso cref="FacetsCollector.MatchingDocs"/> instance is returned per segment, even
+        /// <see cref="FacetsCollector.MatchingDocs"/> instance is returned per segment, even
         /// if no hits from that segment are included in the sampled set.
         /// <para>
-        /// Note: One or more of the MatchingDocs might be empty (not containing any
+        /// Note: One or more of the <see cref="FacetsCollector.MatchingDocs"/> might be empty (not containing any
         /// hits) as result of sampling.
         /// </para>
         /// <para>
-        /// Note: {@code MatchingDocs.totalHits} is copied from the original
-        /// MatchingDocs, scores is set to {@code null}
+        /// Note: <see cref="FacetsCollector.MatchingDocs.TotalHits"/> is copied from the original
+        /// <see cref="FacetsCollector.MatchingDocs"/>, scores is set to <c>null</c>
         /// </para>
         /// </summary>
         public override List<MatchingDocs> GetMatchingDocs()
@@ -160,14 +163,16 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Returns the original matching documents. </summary>
+        /// Returns the original matching documents.
+        /// </summary>
         public virtual List<MatchingDocs> GetOriginalMatchingDocs()
         {
             return base.GetMatchingDocs();
         }
 
         /// <summary>
-        /// Create a sampled copy of the matching documents list. </summary>
+        /// Create a sampled copy of the matching documents list.
+        /// </summary>
         private List<MatchingDocs> CreateSampledDocs(IEnumerable<MatchingDocs> matchingDocsList)
         {
             List<MatchingDocs> sampledDocsList = new List<MatchingDocs>(matchingDocsList.Count());
@@ -179,7 +184,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Create a sampled of the given hits. </summary>
+        /// Create a sampled copy of the given hits.
+        /// </summary>
         private MatchingDocs CreateSample(MatchingDocs docs)
         {
             int maxdoc = docs.Context.Reader.MaxDoc;
@@ -254,9 +260,9 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Note: if you use a counting <seealso cref="Facets"/> implementation, you can amortize the
-        /// sampled counts by calling this method. Uses the <seealso cref="FacetsConfig"/> and
-        /// the <seealso cref="IndexSearcher"/> to determine the upper bound for each facet value.
+        /// Note: if you use a counting <see cref="Facets"/> implementation, you can amortize the
+        /// sampled counts by calling this method. Uses the <see cref="FacetsConfig"/> and
+        /// the <see cref="IndexSearcher"/> to determine the upper bound for each facet value.
         /// </summary>
         public virtual FacetResult AmortizeFacetCounts(FacetResult res, FacetsConfig config, IndexSearcher searcher)
         {
@@ -296,7 +302,8 @@ namespace Lucene.Net.Facet
         }
 
         /// <summary>
-        /// Returns the sampling rate that was used. </summary>
+        /// Returns the sampling rate that was used.
+        /// </summary>
         public virtual double SamplingRate
         {
             get
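
A usage sketch under the assumption that an IndexSearcher, TaxonomyReader and
FacetsConfig are already open (sample size and seed are arbitrary):

    var sampling = new RandomSamplingFacetsCollector(10000, 42);
    searcher.Search(query, sampling);                  // hits beyond the sample size get sampled
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sampling);
    FacetResult raw = facets.GetTopChildren(10, "Author");
    // counts come from the sampled set; scale them toward the true counts:
    FacetResult amortized = sampling.AmortizeFacetCounts(raw, config, searcher);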

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index 7905086..47f4b41 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -30,20 +30,20 @@ namespace Lucene.Net.Facet.Range
     using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
-    /// <seealso cref="Facets"/> implementation that computes counts for
-    ///  dynamic double ranges from a provided {@link
-    ///  ValueSource}, using <seealso cref="FunctionValues#doubleVal"/>.  Use
+    /// <see cref="Facets"/> implementation that computes counts for
+    ///  dynamic double ranges from a provided <see cref="ValueSource"/>, 
+    ///  using <see cref="FunctionValues.DoubleVal"/>.  Use
     ///  this for dimensions that change in real-time (e.g. a
     ///  relative time based dimension like "Past day", "Past 2
     ///  days", etc.) or that change for each request (e.g.
-    ///  distance from the user's location, "< 1 km", "< 2 km",
+    ///  distance from the user's location, "&lt; 1 km", "&lt; 2 km",
     ///  etc.).
     /// 
-    ///  <para> If you had indexed your field using {@link
-    ///  FloatDocValuesField} then pass <seealso cref="FloatFieldSource"/>
-    ///  as the <seealso cref="ValueSource"/>; if you used {@link
-    ///  DoubleDocValuesField} then pass {@link
-    ///  DoubleFieldSource} (this is the default used when you
+    ///  <para> If you had indexed your field using <see cref="Documents.FloatDocValuesField"/> 
+    ///  then pass <see cref="Queries.Function.ValueSources.FloatFieldSource"/>
+    ///  as the <see cref="ValueSource"/>; if you used 
+    ///  <see cref="Documents.DoubleDocValuesField"/> then pass 
+    ///  <see cref="DoubleFieldSource"/> (this is the default used when you
    ///  pass just the field name).
     /// 
     ///  @lucene.experimental 
@@ -52,8 +52,8 @@ namespace Lucene.Net.Facet.Range
     public class DoubleRangeFacetCounts : RangeFacetCounts
     {
         /// <summary>
-        /// Create {@code RangeFacetCounts}, using {@link
-        ///  DoubleFieldSource} from the specified field. 
+        /// Create <see cref="RangeFacetCounts"/>, using 
+        /// <see cref="DoubleFieldSource"/> from the specified field. 
         /// </summary>
         public DoubleRangeFacetCounts(string field, FacetsCollector hits, params DoubleRange[] ranges)
             : this(field, new DoubleFieldSource(field), hits, ranges)
@@ -61,8 +61,8 @@ namespace Lucene.Net.Facet.Range
         }
 
         /// <summary>
-        /// Create {@code RangeFacetCounts}, using the provided
-        ///  <seealso cref="ValueSource"/>. 
+        /// Create <see cref="RangeFacetCounts"/>, using the provided
+        /// <see cref="ValueSource"/>. 
         /// </summary>
         public DoubleRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, params DoubleRange[] ranges)
             : this(field, valueSource, hits, null, ranges)
@@ -70,11 +70,11 @@ namespace Lucene.Net.Facet.Range
         }
 
         /// <summary>
-        /// Create {@code RangeFacetCounts}, using the provided
-        ///  <seealso cref="ValueSource"/>, and using the provided Filter as
-        ///  a fastmatch: only documents passing the filter are
-        ///  checked for the matching ranges.  The filter must be
-        ///  random access (implement <seealso cref="DocIdSet#bits"/>). 
+        /// Create <see cref="RangeFacetCounts"/>, using the provided
+        /// <see cref="ValueSource"/>, and using the provided Filter as
+        /// a fastmatch: only documents passing the filter are
+        /// checked for the matching ranges.  The filter must be
+        /// random access (implement <see cref="DocIdSet.GetBits()"/>). 
         /// </summary>
         public DoubleRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, Filter fastMatchFilter, DoubleRange[] ranges)
             : base(field, ranges, fastMatchFilter)
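
A minimal counting sketch (the field name and ranges are illustrative; `searcher` is
assumed to be an open IndexSearcher):

    var fc = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), fc);
    Facets facets = new DoubleRangeFacetCounts("distance", fc,
        new DoubleRange("< 1 km", 0.0, true, 1.0, false),
        new DoubleRange("< 2 km", 0.0, true, 2.0, false));
    FacetResult result = facets.GetTopChildren(10, "distance");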

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index f5753fa..a871e1e 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -23,11 +23,10 @@ namespace Lucene.Net.Facet.Range
 
     /// <summary>
     /// Counts how many times each range was seen;
-    ///  per-hit it's just a binary search (<seealso cref="#add"/>)
-    ///  against the elementary intervals, and in the end we
-    ///  rollup back to the original ranges. 
+    /// per-hit it's just a binary search (<see cref="Add"/>)
+    /// against the elementary intervals, and in the end we
+    /// roll the counts back up to the original ranges. 
     /// </summary>
-
     internal sealed class LongRangeCounter
     {
         internal readonly LongRangeNode root;
@@ -205,8 +204,8 @@ namespace Lucene.Net.Facet.Range
 
         /// <summary>
         /// Fills counts corresponding to the original input
-        ///  ranges, returning the missing count (how many hits
-        ///  didn't match any ranges). 
+        /// ranges, returning the missing count (how many hits
+        /// didn't match any ranges). 
         /// </summary>
         public int FillCounts(int[] counts)
         {
@@ -285,7 +284,8 @@ namespace Lucene.Net.Facet.Range
         }
 
         /// <summary>
-        /// Holds one node of the segment tree. </summary>
+        /// Holds one node of the segment tree.
+        /// </summary>
         public sealed class LongRangeNode
         {
             internal readonly LongRangeNode left;
@@ -328,7 +328,8 @@ namespace Lucene.Net.Facet.Range
             }
 
             /// <summary>
-            /// Recursively assigns range outputs to each node. </summary>
+            /// Recursively assigns range outputs to each node.
+            /// </summary>
             internal void AddOutputs(int index, LongRange range)
             {
                 if (start >= range.minIncl && end <= range.maxIncl)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 60451c4..e3a17e7 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -29,13 +29,13 @@ namespace Lucene.Net.Facet.Range
     using ValueSource = Lucene.Net.Queries.Function.ValueSource;
 
     /// <summary>
-    /// <seealso cref="Facets"/> implementation that computes counts for
-    ///  dynamic long ranges from a provided <seealso cref="ValueSource"/>,
-    ///  using <seealso cref="FunctionValues#longVal"/>.  Use
-    ///  this for dimensions that change in real-time (e.g. a
-    ///  relative time based dimension like "Past day", "Past 2
-    ///  days", etc.) or that change for each request (e.g. 
-    ///  distance from the user's location, "< 1 km", "< 2 km",
+    /// <see cref="Facets"/> implementation that computes counts for
+    /// dynamic long ranges from a provided <see cref="ValueSource"/>,
+    /// using <see cref="FunctionValues.LongVal"/>.  Use
+    /// this for dimensions that change in real-time (e.g. a
+    /// relative time based dimension like "Past day", "Past 2
+    /// days", etc.) or that change for each request (e.g. 
+    /// distance from the user's location, "&lt; 1 km", "&lt; 2 km",
     ///  etc.).
     /// 
     ///  @lucene.experimental 
@@ -44,8 +44,8 @@ namespace Lucene.Net.Facet.Range
     {
 
         /// <summary>
-        /// Create {@code LongRangeFacetCounts}, using {@link
-        ///  LongFieldSource} from the specified field. 
+        /// Create <see cref="LongRangeFacetCounts"/>, using
+        /// <see cref="LongFieldSource"/> from the specified field. 
         /// </summary>
         public LongRangeFacetCounts(string field, FacetsCollector hits, params LongRange[] ranges)
             : this(field, new LongFieldSource(field), hits, ranges)
@@ -53,8 +53,8 @@ namespace Lucene.Net.Facet.Range
         }
 
         /// <summary>
-        /// Create {@code RangeFacetCounts}, using the provided
-        ///  <seealso cref="ValueSource"/>. 
+        /// Create <see cref="RangeFacetCounts"/>, using the provided
+        /// <see cref="ValueSource"/>. 
         /// </summary>
         public LongRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, params LongRange[] ranges)
             : this(field, valueSource, hits, null, ranges)
@@ -62,11 +62,11 @@ namespace Lucene.Net.Facet.Range
         }
 
         /// <summary>
-        /// Create {@code RangeFacetCounts}, using the provided
-        ///  <seealso cref="ValueSource"/>, and using the provided Filter as
-        ///  a fastmatch: only documents passing the filter are
-        ///  checked for the matching ranges.  The filter must be
-        ///  random access (implement <seealso cref="DocIdSet#bits"/>). 
+        /// Create <see cref="RangeFacetCounts"/>, using the provided
+        /// <see cref="ValueSource"/>, and using the provided Filter as
+        /// a fastmatch: only documents passing the filter are
+        /// checked for the matching ranges.  The filter must be
+        /// random access (implement <see cref="DocIdSet.GetBits"/>). 
         /// </summary>
         public LongRangeFacetCounts(string field, ValueSource valueSource, 
             FacetsCollector hits, Filter fastMatchFilter, params LongRange[] ranges)
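
A sketch of the "relative time" use case mentioned above, assuming documents carry a
numeric `timestamp` field (seconds since epoch) and hits were collected into `fc`:

    long nowSec = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
    Facets facets = new LongRangeFacetCounts("timestamp",
        new LongFieldSource("timestamp"), fc,
        new LongRange("Past day", nowSec - 24 * 3600, true, nowSec, true),
        new LongRange("Past week", nowSec - 7 * 24 * 3600, true, nowSec, true));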

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Range/Range.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/Range.cs b/src/Lucene.Net.Facet/Range/Range.cs
index 32ea724..ca2e2f4 100644
--- a/src/Lucene.Net.Facet/Range/Range.cs
+++ b/src/Lucene.Net.Facet/Range/Range.cs
@@ -44,30 +44,30 @@
         }
 
         /// <summary>
-        /// Returns a new <seealso cref="Filter"/> accepting only documents
-        ///  in this range.  This filter is not general-purpose;
-        ///  you should either use it with <seealso cref="DrillSideways"/> by
-        ///  adding it to <seealso cref="DrillDownQuery#add"/>, or pass it to
-        ///  <seealso cref="FilteredQuery"/> using its {@link
-        ///  FilteredQuery#QUERY_FIRST_FILTER_STRATEGY}.  If the
-        ///  <seealso cref="ValueSource"/> is static, e.g. an indexed numeric
-        ///  field, then it may be more efficient to use {@link
-        ///  NumericRangeFilter}.  The provided fastMatchFilter,
-        ///  if non-null, will first be consulted, and only if
-        ///  that is set for each document will the range then be
-        ///  checked. 
+        /// Returns a new <see cref="Filter"/> accepting only documents
+        /// in this range.  This filter is not general-purpose;
+        /// you should either use it with <see cref="DrillSideways"/> by
+        /// adding it to <see cref="DrillDownQuery.Add"/>, or pass it to
+        /// <see cref="Search.FilteredQuery"/> using its 
+        /// <see cref="Search.FilteredQuery.QUERY_FIRST_FILTER_STRATEGY"/>.
+        /// If the <see cref="ValueSource"/> is static, e.g. an indexed numeric
+        /// field, then it may be more efficient to use 
+        /// <see cref="Search.NumericRangeFilter"/>.  The provided <paramref name="fastMatchFilter"/>,
+        /// if non-null, will first be consulted, and only if
+        /// that is set for each document will the range then be
+        /// checked. 
         /// </summary>
         public abstract Filter GetFilter(Filter fastMatchFilter, ValueSource valueSource);
 
         /// <summary>
-        /// Returns a new <seealso cref="Filter"/> accepting only documents
+        /// Returns a new <see cref="Filter"/> accepting only documents
         ///  in this range.  This filter is not general-purpose;
-        ///  you should either use it with <seealso cref="DrillSideways"/> by
-        ///  adding it to <seealso cref="DrillDownQuery#add"/>, or pass it to
-        ///  <seealso cref="FilteredQuery"/> using its {@link
-        ///  FilteredQuery#QUERY_FIRST_FILTER_STRATEGY}.  If the
-        ///  <seealso cref="ValueSource"/> is static, e.g. an indexed numeric
-        ///  field, then it may be more efficient to use <seealso cref="NumericRangeFilter"/>. 
+        ///  you should either use it with <see cref="DrillSideways"/> by
+        ///  adding it to <see cref="DrillDownQuery.Add"/>, or pass it to
+        ///  <see cref="Search.FilteredQuery"/> using its 
+        ///  <see cref="Search.FilteredQuery.QUERY_FIRST_FILTER_STRATEGY"/>.  If the
+        ///  <see cref="ValueSource"/> is static, e.g. an indexed numeric
+        ///  field, then it may be more efficient to use <see cref="Search.NumericRangeFilter"/>. 
         /// </summary>
         public virtual Filter GetFilter(ValueSource valueSource)
         {
@@ -75,7 +75,8 @@
         }
 
         /// <summary>
-        /// Invoke this for a useless range. </summary>
+        /// Invoke this for a useless range.
+        /// </summary>
         protected internal virtual void FailNoMatch()
         {
             throw new System.ArgumentException("range \"" + Label + "\" matches nothing");
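
A drill-down sketch following the note above (baseQuery, searcher and the `distance`
field are assumptions; the range and value source names are illustrative):

    Range range = new DoubleRange("< 2 km", 0.0, true, 2.0, false);
    Filter filter = range.GetFilter(new DoubleFieldSource("distance"));
    Query drillDown = new FilteredQuery(baseQuery, filter,
        FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
    TopDocs hits = searcher.Search(drillDown, 10);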

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
index 943f4e1..2647238 100644
--- a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
@@ -38,9 +38,9 @@ namespace Lucene.Net.Facet.Range
 
         /// <summary>
         /// Optional: if specified, we first test this Filter to
-        ///  see whether the document should be checked for
-        ///  matching ranges.  If this is null, all documents are
-        ///  checked. 
+        /// see whether the document should be checked for
+        /// matching ranges.  If this is null, all documents are
+        /// checked. 
         /// </summary>
         protected internal readonly Filter fastMatchFilter;
 
@@ -53,7 +53,7 @@ namespace Lucene.Net.Facet.Range
         protected internal int totCount;
 
         /// <summary>
-        /// Create {@code RangeFacetCounts} </summary>
+        /// Create <see cref="RangeFacetCounts"/> </summary>
         protected internal RangeFacetCounts(string field, Range[] ranges, Filter fastMatchFilter)
         {
             this.field = field;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index 9ecca3f..efbcbd0 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -23,7 +23,7 @@ namespace Lucene.Net.Facet.SortedSet
 	 */
 
     /// <summary>
-    /// Default implementation of <seealso cref="SortedSetDocValuesFacetCounts"/>
+    /// Default implementation of <see cref="SortedSetDocValuesFacetCounts"/>
     /// </summary>
     public class DefaultSortedSetDocValuesReaderState : SortedSetDocValuesReaderState
     {
@@ -32,14 +32,14 @@ namespace Lucene.Net.Facet.SortedSet
         private readonly int valueCount;
 
         /// <summary>
-        /// <seealso cref="IndexReader"/> passed to the constructor. </summary>
+        /// <see cref="IndexReader"/> passed to the constructor. </summary>
         private readonly IndexReader origReader;
 
         private readonly IDictionary<string, OrdRange> prefixToOrdRange = new Dictionary<string, OrdRange>();
 
         /// <summary>
         /// Creates this, pulling doc values from the specified
-        ///  field. 
+        /// field. 
         /// </summary>
         public DefaultSortedSetDocValuesReaderState(IndexReader reader, string field = FacetsConfig.DEFAULT_INDEX_FIELD_NAME)
         {
@@ -97,7 +97,8 @@ namespace Lucene.Net.Facet.SortedSet
         }
 
         /// <summary>
-        /// Return top-level doc values. </summary>
+        /// Return top-level doc values.
+        /// </summary>
         public override SortedSetDocValues DocValues
         {
             get
@@ -107,7 +108,8 @@ namespace Lucene.Net.Facet.SortedSet
         }
 
         /// <summary>
-        /// Returns mapping from prefix to <seealso cref="SortedSetDocValuesReaderState.OrdRange"/>. </summary>
+        /// Returns mapping from prefix to <see cref="SortedSetDocValuesReaderState.OrdRange"/>.
+        /// </summary>
         public override IDictionary<string, OrdRange> PrefixToOrdRange
         {
             get
@@ -117,14 +119,16 @@ namespace Lucene.Net.Facet.SortedSet
         }
 
         /// <summary>
-        /// Returns the <seealso cref="SortedSetDocValuesReaderState.OrdRange"/> for this dimension. </summary>
+        /// Returns the <see cref="SortedSetDocValuesReaderState.OrdRange"/> for this dimension.
+        /// </summary>
         public override OrdRange GetOrdRange(string dim)
         {
             return prefixToOrdRange[dim];
         }
 
         /// <summary>
-        /// Indexed field we are reading. </summary>
+        /// Indexed field we are reading.
+        /// </summary>
         public override string Field
         {
             get
@@ -142,7 +146,8 @@ namespace Lucene.Net.Facet.SortedSet
         }
 
         /// <summary>
-        /// Number of unique labels. </summary>
+        /// Number of unique labels.
+        /// </summary>
         public override int Count
         {
             get

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index e552d83..dc5dba3 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -32,18 +32,18 @@ namespace Lucene.Net.Facet.SortedSet
 
     /// <summary>
     /// Compute facets counts from previously
-    ///  indexed <seealso cref="SortedSetDocValuesFacetField"/>,
-    ///  without require a separate taxonomy index.  Faceting is
-    ///  a bit slower (~25%), and there is added cost on every
-    ///  <seealso cref="IndexReader"/> open to create a new {@link
-    ///  SortedSetDocValuesReaderState}.  Furthermore, this does
-    ///  not support hierarchical facets; only flat (dimension +
-    ///  label) facets, but it uses quite a bit less RAM to do
-    ///  so.
+    /// indexed <see cref="SortedSetDocValuesFacetField"/>,
+    /// without requiring a separate taxonomy index.  Faceting is
+    /// a bit slower (~25%), and there is added cost on every
+    /// <see cref="IndexReader"/> open to create a new 
+    /// <see cref="SortedSetDocValuesReaderState"/>.  Furthermore, this does
+    /// not support hierarchical facets; only flat (dimension +
+    /// label) facets, but it uses quite a bit less RAM to do
+    /// so.
     /// 
-    ///  <para><b>NOTE</b>: this class should be instantiated and
-    ///  then used from a single thread, because it holds a
-    ///  thread-private instance of <seealso cref="SortedSetDocValues"/>.
+    /// <para><b>NOTE</b>: this class should be instantiated and
+    /// then used from a single thread, because it holds a
+    /// thread-private instance of <see cref="SortedSetDocValues"/>.
     /// 
     /// </para>
     /// <para><b>NOTE:</b>: tie-break is by unicode sort order
@@ -60,7 +60,7 @@ namespace Lucene.Net.Facet.SortedSet
 
         /// <summary>
         /// Sparse faceting: returns any dimension that had any
-        ///  hits, topCount labels per dimension. 
+        /// hits, topCount labels per dimension. 
         /// </summary>
         public SortedSetDocValuesFacetCounts(SortedSetDocValuesReaderState state, FacetsCollector hits)
         {
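
A search-side sketch, assuming an open IndexReader/IndexSearcher; the reader state is
created once and reused because it computes per-segment ordinal maps:

    var state = new DefaultSortedSetDocValuesReaderState(indexReader);
    var fc = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), fc);
    Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
    FacetResult authors = facets.GetTopChildren(10, "author");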

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
index ba1f360..3714679 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
@@ -21,13 +21,13 @@
     using FieldType = Lucene.Net.Documents.FieldType;
 
     /// <summary>
-    /// Add an instance of this to your Document for every facet
-    ///  label to be indexed via SortedSetDocValues. 
+    /// Add an instance of this to your <see cref="Documents.Document"/> for every facet
+    /// label to be indexed via <see cref="Index.SortedSetDocValues"/>. 
     /// </summary>
     public class SortedSetDocValuesFacetField : Field
     {
         /// <summary>
-        /// Indexed <seealso cref="FieldType"/>. </summary>
+        /// Indexed <see cref="FieldType"/>. </summary>
         public static readonly FieldType TYPE = new FieldType();
         static SortedSetDocValuesFacetField()
         {
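
The matching indexing-side sketch (no taxonomy writer is involved; the dim/label
values and the surrounding writer are illustrative assumptions):

    var config = new FacetsConfig();
    var doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("author", "Bob"));
    writer.AddDocument(config.Build(doc));   // the single-argument Build overload suffices here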

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
index 636d434..1584e4a 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
@@ -23,29 +23,29 @@ namespace Lucene.Net.Facet.SortedSet
     using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
 
     /// <summary>
-    /// Wraps a <seealso cref="IndexReader"/> and resolves ords
-    ///  using existing <seealso cref="SortedSetDocValues"/> APIs without a
-    ///  separate taxonomy index.  This only supports flat facets
-    ///  (dimension + label), and it makes faceting a bit
-    ///  slower, adds some cost at reopen time, but avoids
-    ///  managing the separate taxonomy index.  It also requires
-    ///  less RAM than the taxonomy index, as it manages the flat
-    ///  (2-level) hierarchy more efficiently.  In addition, the
-    ///  tie-break during faceting is now meaningful (in label
-    ///  sorted order).
+    /// Wraps a <see cref="IndexReader"/> and resolves ords
+    /// using existing <see cref="SortedSetDocValues"/> APIs without a
+    /// separate taxonomy index.  This only supports flat facets
+    /// (dimension + label), and it makes faceting a bit
+    /// slower, adds some cost at reopen time, but avoids
+    /// managing the separate taxonomy index.  It also requires
+    /// less RAM than the taxonomy index, as it manages the flat
+    /// (2-level) hierarchy more efficiently.  In addition, the
+    /// tie-break during faceting is now meaningful (in label
+    /// sorted order).
     /// 
-    ///  <para><b>NOTE</b>: creating an instance of this class is
-    ///  somewhat costly, as it computes per-segment ordinal maps,
-    ///  so you should create it once and re-use that one instance
-    ///  for a given <seealso cref="IndexReader"/>. 
+    /// <para><b>NOTE</b>: creating an instance of this class is
+    /// somewhat costly, as it computes per-segment ordinal maps,
+    /// so you should create it once and re-use that one instance
+    /// for a given <see cref="IndexReader"/>. 
     /// </para>
     /// </summary>
     public abstract class SortedSetDocValuesReaderState
     {
         /// <summary>
         /// Holds start/end range of ords, which maps to one
-        ///  dimension (someday we may generalize it to map to
-        ///  hierarchies within one dimension). 
+        /// dimension (someday we may generalize it to map to
+        /// hierarchies within one dimension). 
         /// </summary>
         public sealed class OrdRange
         {
@@ -80,11 +80,11 @@ namespace Lucene.Net.Facet.SortedSet
         public abstract string Field { get; }
 
         /// <summary>
-        /// Returns the <seealso cref="OrdRange"/> for this dimension. </summary>
+        /// Returns the <see cref="OrdRange"/> for this dimension. </summary>
         public abstract OrdRange GetOrdRange(string dim);
 
         /// <summary>
-        /// Returns mapping from prefix to <seealso cref="OrdRange"/>. </summary>
+        /// Returns mapping from prefix to <see cref="OrdRange"/>. </summary>
         public abstract IDictionary<string, OrdRange> PrefixToOrdRange { get; }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
index f2e70a7..c9fd7af 100644
--- a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
@@ -25,20 +25,20 @@ namespace Lucene.Net.Facet.Taxonomy
     using FieldType = Lucene.Net.Documents.FieldType;
 
     /// <summary>
-    /// Add an instance of this to your <seealso cref="Document"/> to add
-    ///  a facet label associated with an arbitrary byte[].
-    ///  This will require a custom <seealso cref="Facets"/>
-    ///  implementation at search time; see {@link
-    ///  IntAssociationFacetField} and {@link
-    ///  FloatAssociationFacetField} to use existing {@link
-    ///  Facets} implementations.
+    /// Add an instance of this to your <see cref="Document"/> to add
+    /// a facet label associated with an arbitrary <see cref="byte[]"/>.
+    /// This will require a custom <see cref="Facets"/>
+    /// implementation at search time; see <see cref="IntAssociationFacetField"/> 
+    /// and <see cref="FloatAssociationFacetField"/> to use existing 
+    /// <see cref="Facets"/> implementations.
     /// 
     ///  @lucene.experimental 
     /// </summary>
     public class AssociationFacetField : Field
     {
         /// <summary>
-        /// Indexed <seealso cref="FieldType"/>. </summary>
+        /// Indexed <see cref="FieldType"/>.
+        /// </summary>
         public static readonly FieldType TYPE = new FieldType();
         static AssociationFacetField()
         {
@@ -47,20 +47,23 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Dimension for this field. </summary>
+        /// Dimension for this field.
+        /// </summary>
         public string Dim { get; private set; }
 
         /// <summary>
-        /// Facet path for this field. </summary>
+        /// Facet path for this field.
+        /// </summary>
         public string[] Path { get; private set; }
 
         /// <summary>
-        /// Associated value. </summary>
+        /// Associated value.
+        /// </summary>
         public BytesRef Assoc { get; private set; }
 
         /// <summary>
-        /// Creates this from {@code dim} and {@code path} and an
-        ///  association 
+        /// Creates this from <paramref name="dim"/> and <paramref name="path"/> and an
+        /// association 
         /// </summary>
         public AssociationFacetField(BytesRef assoc, string dim, params string[] path)
             : base("dummy", TYPE)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/44958102/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index 62ee95f..6cb16de 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -32,13 +32,13 @@ namespace Lucene.Net.Facet.Taxonomy
 
     /// <summary>
     /// A per-segment cache of documents' facet ordinals. Every
-    /// <seealso cref="CachedOrds"/> holds the ordinals in a raw {@code
-    /// int[]}, and therefore consumes as much RAM as the total
+    /// <see cref="CachedOrds"/> holds the ordinals in a raw <see cref="int[]"/>, 
+    /// and therefore consumes as much RAM as the total
     /// number of ordinals found in the segment, but saves the
     /// CPU cost of decoding ordinals during facet counting.
     /// 
     /// <para>
-    /// <b>NOTE:</b> every <seealso cref="CachedOrds"/> is limited to 2.1B
+    /// <b>NOTE:</b> every <see cref="CachedOrds"/> is limited to 2.1B
     /// total ordinals. If that is a limitation for you then
     /// consider limiting the segment size to fewer documents, or
     /// use an alternative cache which pages through the category
@@ -47,7 +47,7 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </para>
     /// <para>
     /// <b>NOTE:</b> when using this cache, it is advised to use
-    /// a <seealso cref="DocValuesFormat"/> that does not cache the data in
+    /// a <see cref="DocValuesFormat"/> that does not cache the data in
     /// memory, at least for the category lists fields, or
     /// otherwise you'll be doing double-caching.
     /// 
@@ -122,20 +122,23 @@ namespace Lucene.Net.Facet.Taxonomy
         }
 
         /// <summary>
-        /// Holds the cached ordinals in two parallel {@code int[]} arrays. </summary>
+        /// Holds the cached ordinals in two parallel <see cref="int[]"/> arrays.
+        /// </summary>
         public sealed class CachedOrds : Accountable
         {
             /// <summary>
-            /// Index into <seealso cref="#ordinals"/> for each document. </summary>
+            /// Index into <see cref="Ordinals"/> for each document.
+            /// </summary>
             public int[] Offsets { get; private set; }
 
             /// <summary>
-            /// Holds ords for all docs. </summary>
+            /// Holds ords for all docs.
+            /// </summary>
             public int[] Ordinals { get; private set; }
 
             /// <summary>
-            /// Creates a new <seealso cref="CachedOrds"/> from the <seealso cref="BinaryDocValues"/>.
-            /// Assumes that the <seealso cref="BinaryDocValues"/> is not {@code null}.
+            /// Creates a new <see cref="CachedOrds"/> from the <see cref="BinaryDocValues"/>.
+            /// Assumes that the <see cref="BinaryDocValues"/> is not <c>null</c>.
             /// </summary>
             public CachedOrds(OrdinalsSegmentReader source, int maxDoc)
             {
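
A sketch of plugging the cache in, per the notes above; keep one CachedOrdinalsReader
and reuse it across requests so the per-segment CachedOrds are actually shared
(taxoReader, config and fc are assumed):

    OrdinalsReader ordsReader = new CachedOrdinalsReader(new DocValuesOrdinalsReader());
    Facets facets = new TaxonomyFacetCounts(ordsReader, taxoReader, config, fc);
    FacetResult result = facets.GetTopChildren(10, "Author");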


[13/46] lucenenet git commit: Facet: Normalized code formatting, license headers, and usings.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
index a4b44aa..e4b723b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
@@ -3,7 +3,6 @@ using System.IO;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,10 +20,9 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
     using ChildrenIterator = Lucene.Net.Facet.Taxonomy.TaxonomyReader.ChildrenIterator;
-    using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
     using Directory = Lucene.Net.Store.Directory;
+    using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
     using FSDirectory = Lucene.Net.Store.FSDirectory;
 
     /// <summary>
@@ -33,7 +31,6 @@ namespace Lucene.Net.Facet.Taxonomy
     // java -cp ../build/core/classes/java:../build/facet/classes/java org.apache.lucene.facet.util.PrintTaxonomyStats -printTree /s2/scratch/indices/wikibig.trunk.noparents.facets.Lucene41.nd1M/facets
     public class PrintTaxonomyStats
     {
-
         /// <summary>
         /// Sole constructor. </summary>
         public PrintTaxonomyStats()
@@ -117,5 +114,4 @@ namespace Lucene.Net.Facet.Taxonomy
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index 7abc055..6686188 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -1,9 +1,8 @@
-\ufeffusing System.Threading;
-using Lucene.Net.Search;
+\ufeffusing Lucene.Net.Search;
+using System;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,18 +20,16 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
     using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
-    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
     using IndexReader = Lucene.Net.Index.IndexReader;
-    using IndexWriter = Lucene.Net.Index.IndexWriter;
     using IndexSearcher = Lucene.Net.Search.IndexSearcher;
-    using Lucene.Net.Search;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IOUtils = Lucene.Net.Util.IOUtils;
     using SearcherFactory = Lucene.Net.Search.SearcherFactory;
     using SearcherManager = Lucene.Net.Search.SearcherManager;
-    using Directory = Lucene.Net.Store.Directory;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using System;
 
     /// <summary>
     /// Manages near-real-time reopen of both an IndexSearcher
@@ -45,7 +42,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class SearcherTaxonomyManager : ReferenceManager<SearcherTaxonomyManager.SearcherAndTaxonomy>
     {
-
         /// <summary>
         /// Holds a matched pair of <seealso cref="IndexSearcher"/> and
         ///  <seealso cref="TaxonomyReader"/> 
@@ -77,7 +73,8 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Creates near-real-time searcher and taxonomy reader
         ///  from the corresponding writers. 
         /// </summary>
-        public SearcherTaxonomyManager(IndexWriter writer, bool applyAllDeletes, SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter)
+        public SearcherTaxonomyManager(IndexWriter writer, bool applyAllDeletes, 
+            SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter)
         {
             if (searcherFactory == null)
             {
@@ -86,7 +83,8 @@ namespace Lucene.Net.Facet.Taxonomy
             this.searcherFactory = searcherFactory;
             this.taxoWriter = taxoWriter;
             var taxoReader = new DirectoryTaxonomyReader(taxoWriter);
-            Current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, DirectoryReader.Open(writer, applyAllDeletes)), taxoReader);
+            Current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(
+                searcherFactory, DirectoryReader.Open(writer, applyAllDeletes)), taxoReader);
             this.taxoEpoch = taxoWriter.TaxonomyEpoch;
         }
 
@@ -107,7 +105,8 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             this.searcherFactory = searcherFactory;
             var taxoReader = new DirectoryTaxonomyReader(taxoDir);
-            Current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, DirectoryReader.Open(indexDir)), taxoReader);
+            Current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(
+                searcherFactory, DirectoryReader.Open(indexDir)), taxoReader);
             this.taxoWriter = null;
             taxoEpoch = -1;
         }
@@ -176,5 +175,4 @@ namespace Lucene.Net.Facet.Taxonomy
             return reference.searcher.IndexReader.RefCount;
         }
     }
-
 }
\ No newline at end of file
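
A near-real-time usage sketch, assuming an IndexWriter and DirectoryTaxonomyWriter are
already open (the acquire/release pattern comes from ReferenceManager):

    var mgr = new SearcherTaxonomyManager(indexWriter, true, new SearcherFactory(), taxoWriter);
    var pair = mgr.Acquire();
    try
    {
        TopDocs hits = pair.searcher.Search(query, 10);
        // the taxonomy reader paired in `pair` matches pair.searcher's point in time
    }
    finally
    {
        mgr.Release(pair);
    }
    mgr.MaybeRefresh();   // typically called periodically from a refresh thread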

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
index 49be839..af008c7 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
@@ -1,9 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,11 +19,10 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
-    using MatchingDocs = FacetsCollector.MatchingDocs;
     using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using IntsRef = Lucene.Net.Util.IntsRef;
+    using MatchingDocs = FacetsCollector.MatchingDocs;
 
     /// <summary>
     /// Reads from any <seealso cref="OrdinalsReader"/>; use {@link
@@ -73,5 +70,4 @@ namespace Lucene.Net.Facet.Taxonomy
             Rollup();
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
index 7d5188e..821c942 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
@@ -1,11 +1,8 @@
-\ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
-using Lucene.Net.Search;
-using Lucene.Net.Support;
+\ufeffusing Lucene.Net.Support;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,11 +20,10 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
-    using MatchingDocs = FacetsCollector.MatchingDocs;
     using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using BytesRef = Lucene.Net.Util.BytesRef;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using MatchingDocs = FacetsCollector.MatchingDocs;
 
     /// <summary>
     /// Aggregates sum of float values previously indexed with
@@ -38,7 +34,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class TaxonomyFacetSumFloatAssociations : FloatTaxonomyFacets
     {
-
         /// <summary>
         /// Create {@code TaxonomyFacetSumFloatAssociations} against
         ///  the default index field. 
@@ -84,9 +79,11 @@ namespace Lucene.Net.Facet.Taxonomy
                     int offset = bytesRef.Offset;
                     while (offset < end)
                     {
-                        int ord = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
+                        int ord = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | 
+                            ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
                         offset += 4;
-                        int value = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
+                        int value = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | 
+                            ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
                         offset += 4;
                         values[ord] += Number.IntBitsToFloat(value);
                     }
@@ -94,5 +91,4 @@ namespace Lucene.Net.Facet.Taxonomy
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
index 9942be6..8d835fb 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
@@ -1,9 +1,7 @@
 \ufeffusing System.Collections.Generic;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,11 +19,10 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
-    using MatchingDocs = FacetsCollector.MatchingDocs;
     using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
     using BytesRef = Lucene.Net.Util.BytesRef;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using MatchingDocs = FacetsCollector.MatchingDocs;
 
     /// <summary>
     /// Aggregates sum of int values previously indexed with
@@ -36,7 +33,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public class TaxonomyFacetSumIntAssociations : IntTaxonomyFacets
     {
-
         /// <summary>
         /// Create {@code TaxonomyFacetSumIntAssociations} against
         ///  the default index field. 
@@ -82,9 +78,11 @@ namespace Lucene.Net.Facet.Taxonomy
                     int offset = bytesRef.Offset;
                     while (offset < end)
                     {
-                        int ord = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
+                        int ord = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | 
+                            ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
                         offset += 4;
-                        int value = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
+                        int value = ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | 
+                            ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] & 0xFF);
                         offset += 4;
                         values[ord] += value;
                     }
@@ -92,5 +90,4 @@ namespace Lucene.Net.Facet.Taxonomy
             }
         }
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index 4d4fc76..4981f33 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -1,13 +1,10 @@
 \ufeffusing System;
 using System.Collections;
 using System.Collections.Generic;
-using System.IO;
 using System.Threading;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -25,16 +22,15 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
-    using MatchingDocs = FacetsCollector.MatchingDocs;
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
-    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
-    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
-    using DoubleDocValues = Lucene.Net.Queries.Function.DocValues.DoubleDocValues;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DoubleDocValues = Lucene.Net.Queries.Function.DocValues.DoubleDocValues;
+    using FunctionValues = Lucene.Net.Queries.Function.FunctionValues;
+    using IntsRef = Lucene.Net.Util.IntsRef;
+    using MatchingDocs = FacetsCollector.MatchingDocs;
     using Scorer = Lucene.Net.Search.Scorer;
+    using ValueSource = Lucene.Net.Queries.Function.ValueSource;
     using Weight = Lucene.Net.Search.Weight;
-    using IntsRef = Lucene.Net.Util.IntsRef;
 
     /// <summary>
     /// Aggregates sum of values from {@link
@@ -53,8 +49,10 @@ namespace Lucene.Net.Facet.Taxonomy
         ///  facet field {@link
         ///  FacetsConfig#DEFAULT_INDEX_FIELD_NAME}. 
         /// </summary>
-        public TaxonomyFacetSumValueSource(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc, ValueSource valueSource)
-            : this(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME), taxoReader, config, fc, valueSource)
+        public TaxonomyFacetSumValueSource(TaxonomyReader taxoReader, FacetsConfig config,
+            FacetsCollector fc, ValueSource valueSource)
+            : this(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME),
+                  taxoReader, config, fc, valueSource)
         {
         }
 
@@ -63,7 +61,8 @@ namespace Lucene.Net.Facet.Taxonomy
         ///  <seealso cref="ValueSource"/>, and pulls ordinals from the
         ///  provided <seealso cref="OrdinalsReader"/>. 
         /// </summary>
-        public TaxonomyFacetSumValueSource(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc, ValueSource valueSource)
+        public TaxonomyFacetSumValueSource(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader,
+            FacetsConfig config, FacetsCollector fc, ValueSource valueSource)
             : base(ordinalsReader.IndexFieldName, taxoReader, config)
         {
             this.ordinalsReader = ordinalsReader;
@@ -193,7 +192,6 @@ namespace Lucene.Net.Facet.Taxonomy
                 {
                     this.outerInstance = outerInstance;
                     this.scorer = scorer;
-
                 }
 
                 public override double DoubleVal(int document)
@@ -214,7 +212,7 @@ namespace Lucene.Net.Facet.Taxonomy
                 if (ReferenceEquals(null, o)) return false;
                 if (ReferenceEquals(this, o)) return true;
                 if (o.GetType() != this.GetType()) return false;
-                return Equals((ScoreValueSource) o);
+                return Equals((ScoreValueSource)o);
             }
 
             protected bool Equals(ScoreValueSource other)
@@ -229,19 +227,13 @@ namespace Lucene.Net.Facet.Taxonomy
 
             private static readonly int hcode = typeof(DoubleDocValuesAnonymousInnerClassHelper).GetHashCode();
 
-            
-
-
             public override string Description
             {
                 get
                 {
                     return "score()";
                 }
-
             }
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index 355ff6c..03baa55 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -1,11 +1,9 @@
 \ufeffusing System;
 using System.Collections.Generic;
 using System.Linq;
-using Lucene.Net.Facet;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,14 +21,12 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
-
     using DimConfig = Lucene.Net.Facet.FacetsConfig.DimConfig; // javadocs
 
     /// <summary>
     /// Base class for all taxonomy-based facets impls. </summary>
     public abstract class TaxonomyFacets : Facets
     {
-
         private static readonly IComparer<FacetResult> BY_VALUE_THEN_DIM = new ComparatorAnonymousInnerClassHelper();
 
         private class ComparatorAnonymousInnerClassHelper : IComparer<FacetResult>
@@ -132,6 +128,5 @@ namespace Lucene.Net.Facet.Taxonomy
             Array.Sort(resultArray, BY_VALUE_THEN_DIM);
             return resultArray;
         }
-
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index f5cfc12..f303abb 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -1,15 +1,11 @@
-\ufeffusing System;
-using System.Diagnostics;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Collections.Generic;
+using System.Diagnostics;
 using System.Threading;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
-
-    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -27,6 +23,8 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+
     /// <summary>
     /// TaxonomyReader is the read-only interface with which the faceted-search
     /// library uses the taxonomy during search time.
@@ -68,12 +66,10 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public abstract class TaxonomyReader
     {
-
         /// <summary>
         /// An iterator over a category's children. </summary>
         public class ChildrenIterator
         {
-
             internal readonly int[] siblings;
             internal int child;
 
@@ -96,7 +92,6 @@ namespace Lucene.Net.Facet.Taxonomy
                 }
                 return res;
             }
-
         }
 
         /// <summary>
@@ -310,8 +305,5 @@ namespace Lucene.Net.Facet.Taxonomy
             }
             return false;
         }
-
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index 26a3e10..0487cbf 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -1,13 +1,8 @@
 \ufeffusing System;
 using System.Collections.Generic;
-using Lucene.Net.Index;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
-
-    using TwoPhaseCommit = Lucene.Net.Index.TwoPhaseCommit;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -25,6 +20,8 @@ namespace Lucene.Net.Facet.Taxonomy
      * limitations under the License.
      */
 
+    using TwoPhaseCommit = Lucene.Net.Index.TwoPhaseCommit;
+
     /// <summary>
     /// TaxonomyWriter is the interface which the faceted-search library uses
     /// to dynamically build the taxonomy at indexing time.
@@ -56,7 +53,6 @@ namespace Lucene.Net.Facet.Taxonomy
     /// </summary>
     public interface TaxonomyWriter : IDisposable, TwoPhaseCommit
     {
-
         /// <summary>
         /// addCategory() adds a category with a given path name to the taxonomy,
         /// and returns its ordinal. If the category was already present in
@@ -118,8 +114,5 @@ namespace Lucene.Net.Facet.Taxonomy
         /// </para>
         /// </summary>
         IDictionary<string, string> CommitData { set; get; }
-
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
index eb63f56..800fda0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
@@ -23,7 +23,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// Utilities for use of <seealso cref="FacetLabel"/> by <seealso cref="CompactLabelToOrdinal"/>. </summary>
     internal class CategoryPathUtils
     {
-
         /// <summary>
         /// Serializes the given <seealso cref="FacetLabel"/> to the <seealso cref="CharBlockArray"/>. </summary>
         public static void Serialize(FacetLabel cp, CharBlockArray charBlockArray)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index c1c9825..691ed41 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -1,9 +1,8 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
-using Lucene.Net.Store;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
@@ -33,7 +32,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     [Serializable]
     public class CharBlockArray : ICharSequence
     {
-
         private const long serialVersionUID = 1L;
 
         private const int DefaultBlockSize = 32 * 1024; // 32 KB default size

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index 1b8f0c3..8e6ca98 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -2,9 +2,6 @@
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -117,7 +114,5 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 return cache == null ? 0 : cache.MemoryUsage;
             }
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index c315ec5..9e8c1ad 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -2,11 +2,8 @@
 using System.Collections;
 using System.Collections.Generic;
 
-
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -32,7 +29,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// </summary>
     public class CollisionMap
     {
-
         private int capacity_Renamed;
         private float loadFactor;
         private int size_Renamed;
@@ -261,7 +257,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 Entry e = this.next_Renamed;
                 if (e == null)
                 {
-					throw new InvalidOperationException(this.GetType() + " cannot get next entry");;
+                    throw new InvalidOperationException(this.GetType() + " cannot get next entry");
                 }
 
                 Entry n = e.next;
@@ -305,7 +301,5 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 get { return Current; }
             }
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index d0cebe2..28f93c4 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -50,7 +50,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// </summary>
     public class CompactLabelToOrdinal : LabelToOrdinal
     {
-
         /// <summary>
         /// Default maximum load factor. </summary>
         public const float DefaultLoadFactor = 0.15f;
@@ -81,7 +80,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// Sole constructor. </summary>
         public CompactLabelToOrdinal(int initialCapacity, float loadFactor, int numHashArrays)
         {
-
             this.hashArrays = new HashArray[numHashArrays];
 
             this.capacity = DetermineCapacity((int)Math.Pow(2, numHashArrays), initialCapacity);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
index 495aa9b..600ecbb 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
@@ -1,7 +1,6 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-	/*
+    /*
 	 * Licensed to the Apache Software Foundation (ASF) under one or more
 	 * contributor license agreements.  See the NOTICE file distributed with
 	 * this work for additional information regarding copyright ownership.
@@ -18,66 +17,63 @@
 	 * limitations under the License.
 	 */
 
-	/// <summary>
-	/// Abstract class for storing Label->Ordinal mappings in a taxonomy. 
-	/// 
-	/// @lucene.experimental
-	/// </summary>
-	public abstract class LabelToOrdinal
-	{
-
-	  /// <summary>
-	  /// How many ordinals we've seen. </summary>
-	  protected internal int counter;
-
-	  /// <summary>
-	  /// Returned by <seealso cref="#getOrdinal"/> when the label isn't
-	  ///  recognized. 
-	  /// </summary>
-	  public const int INVALID_ORDINAL = -2;
-
-	  /// <summary>
-	  /// Default constructor. </summary>
-	  public LabelToOrdinal()
-	  {
-	  }
+    /// <summary>
+    /// Abstract class for storing Label->Ordinal mappings in a taxonomy. 
+    /// 
+    /// @lucene.experimental
+    /// </summary>
+    public abstract class LabelToOrdinal
+    {
+        /// <summary>
+        /// How many ordinals we've seen. </summary>
+        protected internal int counter;
 
-	  /// <summary>
-	  /// return the maximal Ordinal assigned so far
-	  /// </summary>
-	  public virtual int MaxOrdinal
-	  {
-		  get
-		  {
-			return this.counter;
-		  }
-	  }
+        /// <summary>
+        /// Returned by <seealso cref="#getOrdinal"/> when the label isn't
+        ///  recognized. 
+        /// </summary>
+        public const int INVALID_ORDINAL = -2;
 
-	  /// <summary>
-	  /// Returns the next unassigned ordinal. The default behavior of this method
-	  /// is to simply increment a counter.
-	  /// </summary>
-	  public virtual int NextOrdinal
-	  {
-		  get
-		  {
-			return this.counter++;
-		  }
-	  }
+        /// <summary>
+        /// Default constructor. </summary>
+        public LabelToOrdinal()
+        {
+        }
 
-	  /// <summary>
-	  /// Adds a new label if its not yet in the table.
-	  /// Throws an <seealso cref="IllegalArgumentException"/> if the same label with
-	  /// a different ordinal was previoulsy added to this table.
-	  /// </summary>
-	  public abstract void AddLabel(FacetLabel label, int ordinal);
+        /// <summary>
+        /// return the maximal Ordinal assigned so far
+        /// </summary>
+        public virtual int MaxOrdinal
+        {
+            get
+            {
+                return this.counter;
+            }
+        }
 
-	  /// <summary>
-	  /// Returns the ordinal assigned to the given label, 
-	  /// or <seealso cref="#INVALID_ORDINAL"/> if the label cannot be found in this table.
-	  /// </summary>
-	  public abstract int GetOrdinal(FacetLabel label);
+        /// <summary>
+        /// Returns the next unassigned ordinal. The default behavior of this method
+        /// is to simply increment a counter.
+        /// </summary>
+        public virtual int NextOrdinal
+        {
+            get
+            {
+                return this.counter++;
+            }
+        }
 
-	}
+        /// <summary>
+        /// Adds a new label if it is not yet in the table.
+        /// Throws an <seealso cref="IllegalArgumentException"/> if the same label with
+        /// a different ordinal was previously added to this table.
+        /// </summary>
+        public abstract void AddLabel(FacetLabel label, int ordinal);
 
+        /// <summary>
+        /// Returns the ordinal assigned to the given label, 
+        /// or <seealso cref="#INVALID_ORDINAL"/> if the label cannot be found in this table.
+        /// </summary>
+        public abstract int GetOrdinal(FacetLabel label);
+    }
 }
\ No newline at end of file
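
To make the AddLabel/GetOrdinal contract above concrete, here is a deliberately naive, dictionary-backed sketch; it is illustrative only (the real writer-cache implementation is CompactLabelToOrdinal, further down) and assumes FacetLabel's Equals/GetHashCode make it usable as a dictionary key:

    using System;
    using System.Collections.Generic;
    using Lucene.Net.Facet.Taxonomy;

    namespace Lucene.Net.Facet.Taxonomy.WriterCache
    {
        internal class SimpleLabelToOrdinal : LabelToOrdinal
        {
            private readonly Dictionary<FacetLabel, int> map = new Dictionary<FacetLabel, int>();

            public override void AddLabel(FacetLabel label, int ordinal)
            {
                int existing;
                if (map.TryGetValue(label, out existing) && existing != ordinal)
                {
                    // Same label, different ordinal: the contract treats this as an error.
                    throw new ArgumentException("label was previously added with ordinal " + existing);
                }
                map[label] = ordinal;
            }

            public override int GetOrdinal(FacetLabel label)
            {
                int ordinal;
                return map.TryGetValue(label, out ordinal) ? ordinal : INVALID_ORDINAL;
            }
        }
    }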

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index 74bf396..d74891b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -1,7 +1,5 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -26,7 +24,6 @@
     /// </summary>
     public class LruTaxonomyWriterCache : TaxonomyWriterCache
     {
-
         /// <summary>
         /// Determines cache type.
         /// For guaranteed correctness - not relying on no-collisions in the hash
@@ -143,7 +140,5 @@
                 return ret;
             }
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
index cf135ea..40d67f5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
@@ -1,7 +1,6 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-	/*
+    /*
 	 * Licensed to the Apache Software Foundation (ASF) under one or more
 	 * contributor license agreements.  See the NOTICE file distributed with
 	 * this work for additional information regarding copyright ownership.
@@ -18,32 +17,30 @@
 	 * limitations under the License.
 	 */
 
-	/// <summary>
-	/// An an LRU cache of mapping from name to int.
-	/// Used to cache Ordinals of category paths.
-	/// It uses as key, hash of the path instead of the path.
-	/// This way the cache takes less RAM, but correctness depends on
-	/// assuming no collisions. 
-	/// 
-	/// @lucene.experimental
-	/// </summary>
-	public class NameHashIntCacheLRU : NameIntCacheLRU
-	{
-
-	  internal NameHashIntCacheLRU(int maxCacheSize) : base(maxCacheSize)
-	  {
-	  }
-
-	  internal override object Key(FacetLabel name)
-	  {
-		return new long?(name.LongHashCode());
-	  }
-
-	  internal override object Key(FacetLabel name, int prefixLen)
-	  {
-		return new long?(name.Subpath(prefixLen).LongHashCode());
-	  }
+    /// <summary>
+    /// An LRU cache of mappings from name to int.
+    /// Used to cache Ordinals of category paths.
+    /// It uses a hash of the path as the key, instead of the path itself.
+    /// This way the cache takes less RAM, but correctness depends on
+    /// assuming no collisions. 
+    /// 
+    /// @lucene.experimental
+    /// </summary>
+    public class NameHashIntCacheLRU : NameIntCacheLRU
+    {
+        internal NameHashIntCacheLRU(int maxCacheSize)
+            : base(maxCacheSize)
+        {
+        }
 
-	}
+        internal override object Key(FacetLabel name)
+        {
+            return new long?(name.LongHashCode());
+        }
 
+        internal override object Key(FacetLabel name, int prefixLen)
+        {
+            return new long?(name.Subpath(prefixLen).LongHashCode());
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
index a761aea..e5c81e5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
@@ -30,7 +30,6 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     // synchronized so that no two methods of this class are called concurrently.
     public class NameIntCacheLRU
     {
-
         private Dictionary<object, int?> cache;
         internal long nMisses = 0; // for debug
         internal long nHits = 0; // for debug
@@ -63,16 +62,16 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         }
 
         private void CreateCache(int maxSize)
-	  {
-        //if (maxSize < int.MaxValue)
-        //{
-        //    cache = new LRUHashMap<object,int?>(1000,true); //for LRU
-        //}
-        //else
-		{
-		  cache = new Dictionary<object, int?>(1000); //no need for LRU
-		}
-	  }
+        {
+            //if (maxSize < int.MaxValue)
+            //{
+            //    cache = new LRUHashMap<object,int?>(1000,true); //for LRU
+            //}
+            //else
+            {
+                cache = new Dictionary<object, int?>(1000); //no need for LRU
+            }
+        }
 
         internal virtual int? Get(FacetLabel name)
         {
@@ -163,7 +162,5 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
             return true;
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index a20dcd6..b0d32b9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -1,8 +1,5 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-    using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,6 +17,8 @@
      * limitations under the License.
      */
 
+    using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
+
     /// <summary>
     /// TaxonomyWriterCache is a relatively simple interface for a cache of
     /// category->ordinal mappings, used in TaxonomyWriter implementations (such as
@@ -49,7 +48,6 @@
     /// </summary>
     public interface TaxonomyWriterCache
     {
-
         /// <summary>
         /// Let go of whatever resources the cache is holding. After a close(),
         /// this object can no longer be used.
@@ -99,7 +97,5 @@
         /// assume that the cache is still operable after this method returns.
         /// </summary>
         void Clear();
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
index 6430624..4b4dad7 100644
--- a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
@@ -1,7 +1,8 @@
-\ufeffnamespace Lucene.Net.Facet
-{
+\ufeffusing Lucene.Net.Util;
 
-	/*
+namespace Lucene.Net.Facet
+{
+    /*
 	 * Licensed to the Apache Software Foundation (ASF) under one or more
 	 * contributor license agreements.  See the NOTICE file distributed with
 	 * this work for additional information regarding copyright ownership.
@@ -18,56 +19,51 @@
 	 * limitations under the License.
 	 */
 
-    using Lucene.Net.Util;
-
-	/// <summary>
-	/// Keeps highest results, first by largest float value,
-	///  then tie break by smallest ord. 
-	/// </summary>
-	public class TopOrdAndFloatQueue : PriorityQueue<TopOrdAndFloatQueue.OrdAndValue>
-	{
-
-	  /// <summary>
-	  /// Holds a single entry. </summary>
-	  public sealed class OrdAndValue
-	  {
-
-		/// <summary>
-		/// Ordinal of the entry. </summary>
-		public int ord;
-
-		/// <summary>
-		/// Value associated with the ordinal. </summary>
-		public float value;
+    /// <summary>
+    /// Keeps highest results, first by largest float value,
+    ///  then tie break by smallest ord. 
+    /// </summary>
+    public class TopOrdAndFloatQueue : PriorityQueue<TopOrdAndFloatQueue.OrdAndValue>
+    {
+        /// <summary>
+        /// Holds a single entry. </summary>
+        public sealed class OrdAndValue
+        {
+            /// <summary>
+            /// Ordinal of the entry. </summary>
+            public int ord;
 
-		/// <summary>
-		/// Default constructor. </summary>
-		public OrdAndValue()
-		{
-		}
-	  }
+            /// <summary>
+            /// Value associated with the ordinal. </summary>
+            public float value;
 
-	  /// <summary>
-	  /// Sole constructor. </summary>
-	  public TopOrdAndFloatQueue(int topN) : base(topN, false)
-	  {
-	  }
+            /// <summary>
+            /// Default constructor. </summary>
+            public OrdAndValue()
+            {
+            }
+        }
 
-	  public override bool LessThan(OrdAndValue a, OrdAndValue b)
-	  {
-		if (a.value < b.value)
-		{
-		  return true;
-		}
-		else if (a.value > b.value)
-		{
-		  return false;
-		}
-		else
-		{
-		  return a.ord > b.ord;
-		}
-	  }
-	}
+        /// <summary>
+        /// Sole constructor. </summary>
+        public TopOrdAndFloatQueue(int topN) : base(topN, false)
+        {
+        }
 
+        public override bool LessThan(OrdAndValue a, OrdAndValue b)
+        {
+            if (a.value < b.value)
+            {
+                return true;
+            }
+            else if (a.value > b.value)
+            {
+                return false;
+            }
+            else
+            {
+                return a.ord > b.ord;
+            }
+        }
+    }
 }
\ No newline at end of file
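
The LessThan implementation above keeps the entries with the largest values and breaks ties in favor of the smallest ordinal. A minimal usage sketch, assuming the InsertWithOverflow and Pop members inherited from Lucene.Net.Util.PriorityQueue behave as in the Java original (the queue size and sample values are illustrative):

    using Lucene.Net.Facet;

    internal static class TopOrdAndFloatQueueExample
    {
        internal static void Demo()
        {
            var queue = new TopOrdAndFloatQueue(10); // keeps at most the top 10 entries

            // InsertWithOverflow evicts the current "least" entry once the queue is full;
            // with the LessThan above, that is the smallest value (largest ord on ties).
            queue.InsertWithOverflow(new TopOrdAndFloatQueue.OrdAndValue { ord = 3, value = 1.5f });
            queue.InsertWithOverflow(new TopOrdAndFloatQueue.OrdAndValue { ord = 7, value = 1.5f });
            queue.InsertWithOverflow(new TopOrdAndFloatQueue.OrdAndValue { ord = 1, value = 0.25f });

            // Pop() returns entries from the bottom up: ord 1 (0.25f) first, then
            // ord 7 before ord 3, because ties are broken toward the smaller ordinal.
            TopOrdAndFloatQueue.OrdAndValue bottom = queue.Pop();
        }
    }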

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ae225b9e/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
index 08bdffb..80246e5 100644
--- a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
@@ -2,7 +2,6 @@
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,20 +19,16 @@ namespace Lucene.Net.Facet
      * limitations under the License.
      */
 
-    using Lucene.Net.Util;
-
     /// <summary>
     /// Keeps highest results, first by largest int value,
     ///  then tie break by smallest ord. 
     /// </summary>
     public class TopOrdAndIntQueue : PriorityQueue<TopOrdAndIntQueue.OrdAndValue>
     {
-
         /// <summary>
         /// Holds a single entry. </summary>
         public sealed class OrdAndValue
         {
-
             /// <summary>
             /// Ordinal of the entry. </summary>
             public int Ord;
@@ -72,5 +67,4 @@ namespace Lucene.Net.Facet
             }
         }
     }
-
 }
\ No newline at end of file


[46/46] lucenenet git commit: Fixed QueryParser project merge conflict.

Posted by sy...@apache.org.
Fixed QueryParser project merge conflict.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/ddfb46c1
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/ddfb46c1
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/ddfb46c1

Branch: refs/heads/master
Commit: ddfb46c1024e57609f2c3df26ffd921ac95bb663
Parents: 36cde06
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Oct 3 23:56:52 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Tue Oct 4 01:52:01 2016 +0700

----------------------------------------------------------------------
 Lucene.Net.sln | 25 +------------------------
 1 file changed, 1 insertion(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ddfb46c1/Lucene.Net.sln
----------------------------------------------------------------------
diff --git a/Lucene.Net.sln b/Lucene.Net.sln
index 25ab1ab..0322498 100644
--- a/Lucene.Net.sln
+++ b/Lucene.Net.sln
@@ -51,6 +51,7 @@ EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.QueryParser", "src\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj", "{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.QueryParser", "src\Lucene.Net.Tests.QueryParser\Lucene.Net.Tests.QueryParser.csproj", "{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}"
+EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Misc", "src\Lucene.Net.Misc\Lucene.Net.Misc.csproj", "{A3A0D943-B91A-4B7A-9FCB-6160EA575D95}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.Misc", "src\Lucene.Net.Tests.Misc\Lucene.Net.Tests.Misc.csproj", "{7895E023-EB91-401C-B2B3-754EEC42134B}"
@@ -59,10 +60,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Suggest", "src\L
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.Suggest", "src\Lucene.Net.Tests.Suggest\Lucene.Net.Tests.Suggest.csproj", "{A6511598-3008-4A3B-AE68-2D1DA792CA8A}"
 EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.QueryParser", "src\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj", "{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}"
-EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.QueryParser", "src\Lucene.Net.Tests.QueryParser\Lucene.Net.Tests.QueryParser.csproj", "{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}"
-EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Memory", "src\Lucene.Net.Memory\Lucene.Net.Memory.csproj", "{42ECF239-AFC1-427D-921E-B5A277809CF0}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.Memory", "src\Lucene.Net.Tests.Memory\Lucene.Net.Tests.Memory.csproj", "{7F9378BF-C88D-46FF-9AE8-5E7D8C0225D3}"
@@ -334,26 +331,6 @@ Global
 		{A6511598-3008-4A3B-AE68-2D1DA792CA8A}.Release|Mixed Platforms.Build.0 = Release|Any CPU
 		{A6511598-3008-4A3B-AE68-2D1DA792CA8A}.Release|x86.ActiveCfg = Release|Any CPU
 		{A6511598-3008-4A3B-AE68-2D1DA792CA8A}.Release|x86.Build.0 = Release|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Debug|x86.ActiveCfg = Debug|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|Any CPU.Build.0 = Release|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|Mixed Platforms.Build.0 = Release|Any CPU
-		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|x86.ActiveCfg = Release|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|x86.ActiveCfg = Debug|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Any CPU.Build.0 = Release|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Mixed Platforms.Build.0 = Release|Any CPU
-		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|x86.ActiveCfg = Release|Any CPU
 		{42ECF239-AFC1-427D-921E-B5A277809CF0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
 		{42ECF239-AFC1-427D-921E-B5A277809CF0}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{42ECF239-AFC1-427D-921E-B5A277809CF0}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU


[19/46] lucenenet git commit: .NETify Facet: Renamed method from entryIterator() to GetEnumerator()

Posted by sy...@apache.org.
.NETify Facet: Renamed method from entryIterator() to GetEnumerator()


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e72d3cbf
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e72d3cbf
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e72d3cbf

Branch: refs/heads/master
Commit: e72d3cbf1fb8a1d2ee76b6d6c311960893daa456
Parents: e0a73b4
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 10:43:36 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:16 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs          | 2 +-
 src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e72d3cbf/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index 277a166..fd47cd9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -182,7 +182,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
         }
 
-        internal virtual IEnumerator<CollisionMap.Entry> entryIterator()
+        internal virtual IEnumerator<CollisionMap.Entry> GetEnumerator()
         {
             return new EntryIterator(this, entries, size);
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e72d3cbf/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 28f93c4..0ea1122 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -208,7 +208,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             this.collisionMap = new CollisionMap(oldCollisionMap.Capacity(), this.labelRepository);
             this.threshold = (int)(this.capacity * this.loadFactor);
 
-            var it = oldCollisionMap.entryIterator();
+            var it = oldCollisionMap.GetEnumerator();
 
             while (it.MoveNext())
             {
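
The rename lines the method up with the .NET enumerator pattern already used at the call site in CompactLabelToOrdinal above. A minimal sketch of that pattern; since CollisionMap.GetEnumerator() is internal, this assumes code compiled into the Lucene.Net.Facet assembly, and the helper name is illustrative:

    using Lucene.Net.Facet.Taxonomy.WriterCache;

    internal static class CollisionMapEnumeration
    {
        internal static int CountEntries(CollisionMap map)
        {
            int count = 0;
            var it = map.GetEnumerator();   // formerly entryIterator()
            while (it.MoveNext())           // same manual MoveNext() loop as above
            {
                var entry = it.Current;     // the CollisionMap.Entry at this position
                count++;
            }
            return count;
        }
    }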


[20/46] lucenenet git commit: Fixed bug in Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.MemoryOrdinalMap - setting size has no effect on the size of the array.

Posted by sy...@apache.org.
Fixed bug in Facet.Taxonomy.Directory.DirectoryTaxonomyWriter.MemoryOrdinalMap - setting size has no effect on the size of the array.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e4b7e0ba
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e4b7e0ba
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e4b7e0ba

Branch: refs/heads/master
Commit: e4b7e0ba9924265db97a0699da7487f6acd6f759
Parents: e72d3cb
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 12:06:55 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:19 2016 +0700

----------------------------------------------------------------------
 .../Taxonomy/Directory/DirectoryTaxonomyWriter.cs               | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e4b7e0ba/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index a1c1939..b6dc7a1 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -995,7 +995,10 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 map = new int[] { };
             }
 
-            public int Size { set; private get; }
+            public int Size
+            {
+                set { map = new int[value]; }
+            }
 
             public void AddMapping(int origOrdinal, int newOrdinal)
             {
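
With this change, assigning Size actually allocates the backing array before mappings are recorded. A minimal sketch of the expected call order, assuming MemoryOrdinalMap remains a publicly constructible nested class as in the Java original (the ordinals are illustrative):

    using Lucene.Net.Facet.Taxonomy.Directory;

    internal static class MemoryOrdinalMapUsage
    {
        internal static void Remap()
        {
            var map = new DirectoryTaxonomyWriter.MemoryOrdinalMap();

            map.Size = 3;         // now allocates int[3]; previously this setter was a no-op
            map.AddMapping(0, 0); // origOrdinal -> newOrdinal
            map.AddMapping(1, 2);
            map.AddMapping(2, 1);
        }
    }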


[42/46] lucenenet git commit: Changed Close() to IDisposable.Dispose() in Facet.Taxonomy.WriterCache.ITaxonomyWriterCache.

Posted by sy...@apache.org.
Changed Close() to IDisposable.Dispose() in Facet.Taxonomy.WriterCache.ITaxonomyWriterCache.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/49e25039
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/49e25039
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/49e25039

Branch: refs/heads/master
Commit: 49e25039131cb5b2d577e38d60094690f5321155
Parents: 4495810
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 23:47:13 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:32:13 2016 +0700

----------------------------------------------------------------------
 .../Directory/DirectoryTaxonomyWriter.cs        |  2 +-
 .../WriterCache/Cl2oTaxonomyWriterCache.cs      |  2 +-
 .../WriterCache/LruTaxonomyWriterCache.cs       |  9 ++--
 .../Taxonomy/WriterCache/TaxonomyWriterCache.cs | 43 +++++++++-----------
 .../Directory/TestConcurrentFacetedIndexing.cs  |  2 +-
 .../Directory/TestDirectoryTaxonomyWriter.cs    |  2 +-
 6 files changed, 29 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49e25039/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index f59337e..7ecfe94 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -381,7 +381,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                 }
                 if (cache != null)
                 {
-                    cache.Close();
+                    cache.Dispose();
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49e25039/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index a6f6ff8..6d26448 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
         }
 
-        public virtual void Close()
+        public virtual void Dispose()
         {
             lock (this)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49e25039/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index 3f6c4dc..abad1ea 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -100,12 +100,15 @@
             }
         }
 
-        public virtual void Close()
+        public virtual void Dispose()
         {
             lock (this)
             {
-                cache.Clear();
-                cache = null;
+                if (cache != null)
+                {
+                    cache.Clear();
+                    cache = null;
+                }
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49e25039/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index bb2eec8..127e752 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -1,26 +1,27 @@
 \ufeffnamespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
+    using System;
     /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
 
     /// <summary>
-    /// ITaxonomyWriterCache is a relatively simple interface for a cache of
+    /// <see cref="ITaxonomyWriterCache"/> is a relatively simple interface for a cache of
     /// category->ordinal mappings, used in ITaxonomyWriter implementations (such as
     /// <see cref="DirectoryTaxonomyWriter"/>).
     /// <para>
@@ -46,15 +47,9 @@
     /// @lucene.experimental
     /// </para>
     /// </summary>
-    public interface ITaxonomyWriterCache
+    public interface ITaxonomyWriterCache : IDisposable
     {
         /// <summary>
-        /// Let go of whatever resources the cache is holding. After a <see cref="Close()"/>,
-        /// this object can no longer be used.
-        /// </summary>
-        void Close();
-
-        /// <summary>
         /// Lookup a category in the cache, returning its ordinal, or a negative
         /// number if the category is not in the cache.
         /// <para>
@@ -97,7 +92,7 @@
         bool IsFull { get; }
 
         /// <summary>
-        /// Clears the content of the cache. Unlike <see cref="Close()"/>, the caller can
+        /// Clears the content of the cache. Unlike <see cref="Dispose()"/>, the caller can
         /// assume that the cache is still operable after this method returns.
         /// </summary>
         void Clear();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49e25039/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index abd82f8..30de593 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             }
 
 
-            public virtual void Close()
+            public virtual void Dispose()
             {
             }
             public virtual int Get(FacetLabel categoryPath)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49e25039/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index 4e6dd85..ef6db80 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -54,7 +54,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         private class TaxonomyWriterCacheAnonymousInnerClassHelper : ITaxonomyWriterCache
         {
-            public virtual void Close()
+            public virtual void Dispose()
             {
             }
             public virtual int Get(FacetLabel categoryPath)


[28/46] lucenenet git commit: Renamed Facet.Taxonomy.WriterCache.CompactLabelToOrdinal.MemoryUsage, Cl2oTaxonomyWriterCache.MemoryUsage and CollisionMap.MemoryUsage back to their original name (GetMemoryUsage()) because these are intensive operations.

Posted by sy...@apache.org.
Renamed Facet.Taxonomy.WriterCache.CompactLabelToOrdinal.MemoryUsage, Cl2oTaxonomyWriterCache.MemoryUsage and CollisionMap.MemoryUsage back to their original name (GetMemoryUsage()) because these are intensive operations.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/cb8d47f2
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/cb8d47f2
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/cb8d47f2

Branch: refs/heads/master
Commit: cb8d47f280b1b459c9b56401d447da0f70564338
Parents: 56bf502
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:40:35 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:41 2016 +0700

----------------------------------------------------------------------
 .../WriterCache/Cl2oTaxonomyWriterCache.cs      |  7 +--
 .../Taxonomy/WriterCache/CollisionMap.cs        | 19 ++++-----
 .../WriterCache/CompactLabelToOrdinal.cs        | 45 +++++++++-----------
 3 files changed, 31 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/cb8d47f2/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index ba9da33..6cbff1f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -107,12 +107,9 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         /// <summary>
         /// Returns the number of bytes in memory used by this object. </summary>
-        public virtual int MemoryUsage
+        public virtual int GetMemoryUsage()
         {
-            get
-            {
-                return cache == null ? 0 : cache.MemoryUsage;
-            }
+            return cache == null ? 0 : cache.GetMemoryUsage();
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/cb8d47f2/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index fd47cd9..1b2767b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -198,27 +198,24 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// <summary>
         /// Returns an estimate of the memory usage of this CollisionMap. </summary>
         /// <returns> The approximate number of bytes used by this structure. </returns>
-        internal virtual int MemoryUsage
+        internal virtual int GetMemoryUsage()
         {
-            get
+            int memoryUsage = 0;
+            if (this.entries != null)
             {
-                int memoryUsage = 0;
-                if (this.entries != null)
+                foreach (Entry e in this.entries)
                 {
-                    foreach (Entry e in this.entries)
+                    if (e != null)
                     {
-                        if (e != null)
+                        memoryUsage += (4 * 4);
+                        for (Entry ee = e.next; ee != null; ee = ee.next)
                         {
                             memoryUsage += (4 * 4);
-                            for (Entry ee = e.next; ee != null; ee = ee.next)
-                            {
-                                memoryUsage += (4 * 4);
-                            }
                         }
                     }
                 }
-                return memoryUsage;
             }
+            return memoryUsage;
         }
 
         private class EntryIterator : IEnumerator<Entry>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/cb8d47f2/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 0ea1122..94a9e38 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -356,35 +356,32 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// this package. Memory is consumed mainly by three structures: the hash arrays,
         /// label repository and collision map.
         /// </summary>
-        internal virtual int MemoryUsage
+        internal virtual int GetMemoryUsage()
         {
-            get
+            int memoryUsage = 0;
+            if (this.hashArrays != null)
             {
-                int memoryUsage = 0;
-                if (this.hashArrays != null)
+                // HashArray capacity is instance-specific.
+                foreach (HashArray ha in this.hashArrays)
                 {
-                    // HashArray capacity is instance-specific.
-                    foreach (HashArray ha in this.hashArrays)
-                    {
-                        // Each has 2 capacity-length arrays of ints.
-                        memoryUsage += (ha.capacity * 2 * 4) + 4;
-                    }
-                }
-                if (this.labelRepository != null)
-                {
-                    // All blocks are the same size.
-                    int blockSize = this.labelRepository.blockSize;
-                    // Each block has room for blockSize UTF-16 chars.
-                    int actualBlockSize = (blockSize * 2) + 4;
-                    memoryUsage += this.labelRepository.blocks.Count * actualBlockSize;
-                    memoryUsage += 8; // Two int values for array as a whole.
-                }
-                if (this.collisionMap != null)
-                {
-                    memoryUsage += this.collisionMap.MemoryUsage;
+                    // Each has 2 capacity-length arrays of ints.
+                    memoryUsage += (ha.capacity * 2 * 4) + 4;
                 }
-                return memoryUsage;
             }
+            if (this.labelRepository != null)
+            {
+                // All blocks are the same size.
+                int blockSize = this.labelRepository.blockSize;
+                // Each block has room for blockSize UTF-16 chars.
+                int actualBlockSize = (blockSize * 2) + 4;
+                memoryUsage += this.labelRepository.blocks.Count * actualBlockSize;
+                memoryUsage += 8; // Two int values for array as a whole.
+            }
+            if (this.collisionMap != null)
+            {
+                memoryUsage += this.collisionMap.GetMemoryUsage();
+            }
+            return memoryUsage;
         }
 
         /// <summary>

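The property-to-method rename follows the usual .NET guideline that property getters should be cheap, whereas these calls walk the hash arrays, label repository and collision map on every invocation. A minimal sketch of calling the renamed member (the reporting helper is illustrative):

    using System;
    using Lucene.Net.Facet.Taxonomy.WriterCache;

    internal static class CacheMemoryReporting
    {
        internal static void Report(Cl2oTaxonomyWriterCache cache)
        {
            // GetMemoryUsage() recomputes the estimate each time, so call it
            // sparingly and cache the result if it is needed repeatedly.
            int bytes = cache.GetMemoryUsage();
            Console.WriteLine("Cl2o cache is using approximately {0} bytes", bytes);
        }
    }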

[23/46] lucenenet git commit: Facet: Changed accessibility of class members to match Java Lucene.

Posted by sy...@apache.org.
Facet: Changed accessibility of class members to match Java Lucene.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/9912999c
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/9912999c
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/9912999c

Branch: refs/heads/master
Commit: 9912999c154159dfcf0f87106132fb85a763271f
Parents: e76ee90
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 14:40:30 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:26 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Range/DoubleRange.cs       |  8 ++++----
 src/Lucene.Net.Facet/Range/LongRange.cs         | 12 +++++------
 .../DefaultSortedSetDocValuesReaderState.cs     |  2 +-
 .../Directory/DirectoryTaxonomyWriter.cs        | 10 +++++-----
 src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs |  4 ++--
 .../Taxonomy/WriterCache/CharBlockArray.cs      | 21 ++++++++++----------
 6 files changed, 28 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9912999c/src/Lucene.Net.Facet/Range/DoubleRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRange.cs b/src/Lucene.Net.Facet/Range/DoubleRange.cs
index 058ff52..affdf26 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRange.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRange.cs
@@ -178,10 +178,10 @@ namespace Lucene.Net.Facet.Range
             {
                 private readonly FilterAnonymousInnerClassHelper outerInstance;
 
-                private Bits acceptDocs;
-                private FunctionValues values;
-                private int maxDoc;
-                private Bits fastMatchBits;
+                private readonly Bits acceptDocs;
+                private readonly FunctionValues values;
+                private readonly int maxDoc;
+                private readonly Bits fastMatchBits;
 
                 public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper outerInstance, Bits acceptDocs, FunctionValues values, int maxDoc, Bits fastMatchBits)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9912999c/src/Lucene.Net.Facet/Range/LongRange.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRange.cs b/src/Lucene.Net.Facet/Range/LongRange.cs
index eb7671b..7b245f9 100644
--- a/src/Lucene.Net.Facet/Range/LongRange.cs
+++ b/src/Lucene.Net.Facet/Range/LongRange.cs
@@ -120,8 +120,8 @@ namespace Lucene.Net.Facet.Range
         {
             private readonly LongRange outerInstance;
 
-            private Filter fastMatchFilter;
-            private ValueSource valueSource;
+            private readonly Filter fastMatchFilter;
+            private readonly ValueSource valueSource;
 
             public FilterAnonymousInnerClassHelper(LongRange outerInstance, Filter fastMatchFilter, ValueSource valueSource)
             {
@@ -175,10 +175,10 @@ namespace Lucene.Net.Facet.Range
             {
                 private readonly FilterAnonymousInnerClassHelper outerInstance;
 
-                private Bits acceptDocs;
-                private FunctionValues values;
-                private int maxDoc;
-                private Bits fastMatchBits;
+                private readonly Bits acceptDocs;
+                private readonly FunctionValues values;
+                private readonly int maxDoc;
+                private readonly Bits fastMatchBits;
 
                 public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper outerInstance, Bits acceptDocs, FunctionValues values, int maxDoc, Bits fastMatchBits)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9912999c/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index 10983d8..a5806c3 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Facet.SortedSet
 
         /// <summary>
         /// <seealso cref="IndexReader"/> passed to the constructor. </summary>
-        public readonly IndexReader origReader;
+        private readonly IndexReader origReader;
 
         private readonly IDictionary<string, OrdRange> prefixToOrdRange = new Dictionary<string, OrdRange>();
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9912999c/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index b6dc7a1..e4f2d3b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -582,11 +582,11 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
 
         private class SinglePositionTokenStream : TokenStream
         {
-            internal ICharTermAttribute termAtt;
-            internal IPositionIncrementAttribute posIncrAtt;
-            internal bool returned;
-            internal int val;
-            internal readonly string word;
+            private ICharTermAttribute termAtt;
+            private IPositionIncrementAttribute posIncrAtt;
+            private bool returned;
+            private int val;
+            private readonly string word;
 
             public SinglePositionTokenStream(string word)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9912999c/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index f94ebe4..4dc391e 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -70,8 +70,8 @@ namespace Lucene.Net.Facet.Taxonomy
         /// An iterator over a category's children. </summary>
         public class ChildrenIterator
         {
-            internal readonly int[] siblings;
-            internal int child;
+            private readonly int[] siblings;
+            private int child;
 
             internal ChildrenIterator(int child, int[] siblings)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9912999c/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index 691ed41..60a0726 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -92,12 +92,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             return index % blockSize;
         }
 
-        public CharBlockArray Append(ICharSequence chars)
+        public virtual CharBlockArray Append(ICharSequence chars)
         {
             return Append(chars, 0, chars.Length);
         }
 
-        public CharBlockArray Append(char c)
+        public virtual CharBlockArray Append(char c)
         {
             if (this.current.length == this.blockSize)
             {
@@ -109,7 +109,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             return this;
         }
 
-        public CharBlockArray Append(ICharSequence chars, int start, int length)
+        public virtual CharBlockArray Append(ICharSequence chars, int start, int length)
         {
             int end = start + length;
             for (int i = start; i < end; i++)
@@ -170,8 +170,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             this.length_Renamed += s.Length;
             return this;
         }
+        public virtual char CharAt(int index)
+        {
+            Block b = blocks[BlockIndex(index)];
+            return b.chars[IndexInBlock(index)];
+        }
 
-        public int Length
+        public virtual int Length
         {
             get
             {
@@ -179,13 +184,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
         }
 
-        public char CharAt(int index)
-        {
-            Block b = blocks[BlockIndex(index)];
-            return b.chars[IndexInBlock(index)];
-        }
-
-        public string SubSequence(int start, int end)
+        public virtual string SubSequence(int start, int end)
         {
             int remaining = end - start;
             StringBuilder sb = new StringBuilder(remaining);


[27/46] lucenenet git commit: Renamed Facet.Taxonomy.WriterCache.ITaxonomyWriterCache.Full back to its original name IsFull to indicate it is a boolean state.

Posted by sy...@apache.org.
Renamed Facet.Taxonomy.WriterCache.ITaxonomyWriterCache.Full back to its original name IsFull to indicate it is a boolean state.
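
A minimal sketch of the resulting shape (surrounding members of the real interface are omitted); the Is* prefix signals a boolean state query rather than an action:

    // Illustrative shape only; other members of the real ITaxonomyWriterCache are omitted.
    public interface ITaxonomyWriterCacheSketch
    {
        /// <summary>
        /// True if the next Put would evict entries from the cache.
        /// Reading as a state check: if (!cache.IsFull) { /* keep adding categories */ }
        /// </summary>
        bool IsFull { get; }
    }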


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/56bf5021
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/56bf5021
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/56bf5021

Branch: refs/heads/master
Commit: 56bf5021831600d3bb373d65c9c637495b07fd4d
Parents: 67b29ee
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 15:34:14 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:38 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs | 2 +-
 .../Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs                | 2 +-
 .../Taxonomy/WriterCache/LruTaxonomyWriterCache.cs                 | 2 +-
 src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs   | 2 +-
 .../Taxonomy/Directory/TestConcurrentFacetedIndexing.cs            | 2 +-
 .../Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs              | 2 +-
 6 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/56bf5021/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index b07940e..9a7e4f9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -796,7 +796,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                             termsEnum = terms.Iterator(termsEnum);
                             while (termsEnum.Next() != null)
                             {
-                                if (!cache.Full)
+                                if (!cache.IsFull)
                                 {
                                     BytesRef t = termsEnum.Term();
                                     // Since we guarantee uniqueness of categories, each term has exactly

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/56bf5021/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index 9a40f16..ba9da33 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -67,7 +67,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             }
         }
 
-        public virtual bool Full
+        public virtual bool IsFull
         {
             get
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/56bf5021/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index 8727893..ad76ccd 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -79,7 +79,7 @@
             }
         }
 
-        public virtual bool Full
+        public virtual bool IsFull
         {
             get
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/56bf5021/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index bbe82ec..1dc4d64 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -90,7 +90,7 @@
         /// Returns true if the cache is full, such that the next <seealso cref="#put"/> will
         /// evict entries from it, false otherwise.
         /// </summary>
-        bool Full { get; }
+        bool IsFull { get; }
 
         /// <summary>
         /// Clears the content of the cache. Unlike <seealso cref="#close()"/>, the caller can

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/56bf5021/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index 52a4d61..49df670 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -60,7 +60,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             {
                 return true;
             }
-            public virtual bool Full
+            public virtual bool IsFull
             {
                 get
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/56bf5021/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index 778a889..5e4ec75 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -65,7 +65,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
             {
                 return true;
             }
-            public virtual bool Full
+            public virtual bool IsFull
             {
                 get
                 {


[32/46] lucenenet git commit: Changed Facet.FacetsCollector.GetMatchingDocs back to a method (non-deterministic), modified it to return a List<T> instead of IList<T> and modified related members OriginalMatchingDocs and CreateSampledDocs() similarly.

Posted by sy...@apache.org.
Changed Facet.FacetsCollector.GetMatchingDocs back to a method (non-deterministic), modified it to return a List<T> instead of IList<T> and modified related members OriginalMatchingDocs and CreateSampledDocs() similarly.
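
The motivation is visible in the diff below: the accessor mutates state (it seals the pending segment into matchingDocs and clears the scratch fields), so a method call conveys that side effect better than a property get. A minimal caller sketch, assuming the usual searcher setup, which is not part of this commit:

    // Illustrative usage only; the query/searcher setup is assumed.
    FacetsCollector fc = new FacetsCollector();
    // searcher.Search(query, fc);   // collect hits per segment

    // Parentheses signal the side effect: the call folds the last segment into the list.
    List<FacetsCollector.MatchingDocs> docs = fc.GetMatchingDocs();
    foreach (FacetsCollector.MatchingDocs md in docs)
    {
        int hits = md.TotalHits;     // per-segment hit count
    }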


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/fd13e8e5
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/fd13e8e5
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/fd13e8e5

Branch: refs/heads/master
Commit: fd13e8e5e8e6032cc4ec9b8e4bcb8ff320b7d01c
Parents: 03d05b3
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Sep 25 16:07:13 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:31:51 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Facet/FacetsCollector.cs         | 21 +++--
 .../RandomSamplingFacetsCollector.cs            | 81 +++++++++-----------
 .../Range/DoubleRangeFacetCounts.cs             |  2 +-
 .../Range/LongRangeFacetCounts.cs               |  2 +-
 .../SortedSet/SortedSetDocValuesFacetCounts.cs  |  2 +-
 .../Taxonomy/FastTaxonomyFacetCounts.cs         |  2 +-
 .../Taxonomy/TaxonomyFacetCounts.cs             |  2 +-
 .../TaxonomyFacetSumFloatAssociations.cs        |  2 +-
 .../Taxonomy/TaxonomyFacetSumIntAssociations.cs |  2 +-
 .../Taxonomy/TaxonomyFacetSumValueSource.cs     |  2 +-
 .../WriterCache/TestCompactLabelToOrdinal.cs    |  4 +-
 .../TestRandomSamplingFacetsCollector.cs        |  4 +-
 12 files changed, 59 insertions(+), 67 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/FacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index d7be2f0..0b225d0 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Facet
         private int totalHits;
         private float[] scores;
         private readonly bool keepScores;
-        private readonly IList<MatchingDocs> matchingDocs = new List<MatchingDocs>();
+        private readonly List<MatchingDocs> matchingDocs = new List<MatchingDocs>();
         private Docs docs;
 
         /// <summary>
@@ -166,20 +166,17 @@ namespace Lucene.Net.Facet
         /// Returns the documents matched by the query, one <seealso cref="GetMatchingDocs"/> per
         /// visited segment.
         /// </summary>
-        public virtual IList<MatchingDocs> GetMatchingDocs
+        public virtual List<MatchingDocs> GetMatchingDocs()
         {
-            get
+            if (docs != null)
             {
-                if (docs != null)
-                {
-                    matchingDocs.Add(new MatchingDocs(this.context, docs.DocIdSet, totalHits, scores));
-                    docs = null;
-                    scores = null;
-                    context = null;
-                }
-
-                return matchingDocs;
+                matchingDocs.Add(new MatchingDocs(this.context, docs.DocIdSet, totalHits, scores));
+                docs = null;
+                scores = null;
+                context = null;
             }
+
+            return matchingDocs;
         }
 
         public override sealed bool AcceptsDocsOutOfOrder()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
index 6c00c40..491cf5e 100644
--- a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
@@ -1,25 +1,26 @@
 \ufeffusing System;
 using System.Collections.Generic;
 using System.IO;
+using System.Linq;
 
 namespace Lucene.Net.Facet
 {
     /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
 
     using DimConfig = FacetsConfig.DimConfig;
     using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
@@ -86,7 +87,7 @@ namespace Lucene.Net.Facet
         private readonly XORShift64Random random;
 
         private double samplingRate;
-        private IList<MatchingDocs> sampledDocs;
+        private List<MatchingDocs> sampledDocs;
         private int totalHits = NOT_CALCULATED;
         private int leftoverBin = NOT_CALCULATED;
         private int leftoverIndex = NOT_CALCULATED;
@@ -132,50 +133,44 @@ namespace Lucene.Net.Facet
         /// MatchingDocs, scores is set to {@code null}
         /// </para>
         /// </summary>
-        public override IList<MatchingDocs> GetMatchingDocs
+        public override List<MatchingDocs> GetMatchingDocs()
         {
-            get
-            {
-                IList<MatchingDocs> matchingDocs = base.GetMatchingDocs;
+            List<MatchingDocs> matchingDocs = base.GetMatchingDocs();
 
-                if (totalHits == NOT_CALCULATED)
+            if (totalHits == NOT_CALCULATED)
+            {
+                totalHits = 0;
+                foreach (MatchingDocs md in matchingDocs)
                 {
-                    totalHits = 0;
-                    foreach (MatchingDocs md in matchingDocs)
-                    {
-                        totalHits += md.TotalHits;
-                    }
+                    totalHits += md.TotalHits;
                 }
+            }
 
-                if (totalHits <= sampleSize)
-                {
-                    return matchingDocs;
-                }
+            if (totalHits <= sampleSize)
+            {
+                return matchingDocs;
+            }
 
-                if (sampledDocs == null)
-                {
-                    samplingRate = (1.0 * sampleSize) / totalHits;
-                    sampledDocs = CreateSampledDocs(matchingDocs);
-                }
-                return sampledDocs;
+            if (sampledDocs == null)
+            {
+                samplingRate = (1.0 * sampleSize) / totalHits;
+                sampledDocs = CreateSampledDocs(matchingDocs);
             }
+            return sampledDocs;
         }
 
         /// <summary>
         /// Returns the original matching documents. </summary>
-        public virtual IList<MatchingDocs> OriginalMatchingDocs
+        public virtual List<MatchingDocs> GetOriginalMatchingDocs()
         {
-            get
-            {
-                return base.GetMatchingDocs;
-            }
+            return base.GetMatchingDocs();
         }
 
         /// <summary>
         /// Create a sampled copy of the matching documents list. </summary>
-        private IList<MatchingDocs> CreateSampledDocs(IList<MatchingDocs> matchingDocsList)
+        private List<MatchingDocs> CreateSampledDocs(IEnumerable<MatchingDocs> matchingDocsList)
         {
-            IList<MatchingDocs> sampledDocsList = new List<MatchingDocs>(matchingDocsList.Count);
+            List<MatchingDocs> sampledDocsList = new List<MatchingDocs>(matchingDocsList.Count());
             foreach (MatchingDocs docs in matchingDocsList)
             {
                 sampledDocsList.Add(CreateSample(docs));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index 52a3ad5..7905086 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -79,7 +79,7 @@ namespace Lucene.Net.Facet.Range
         public DoubleRangeFacetCounts(string field, ValueSource valueSource, FacetsCollector hits, Filter fastMatchFilter, DoubleRange[] ranges)
             : base(field, ranges, fastMatchFilter)
         {
-            Count(valueSource, hits.GetMatchingDocs);
+            Count(valueSource, hits.GetMatchingDocs());
         }
 
         private void Count(ValueSource valueSource, IEnumerable<MatchingDocs> matchingDocs)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index d906cf8..60451c4 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -72,7 +72,7 @@ namespace Lucene.Net.Facet.Range
             FacetsCollector hits, Filter fastMatchFilter, params LongRange[] ranges)
             : base(field, ranges, fastMatchFilter)
         {
-            Count(valueSource, hits.GetMatchingDocs);
+            Count(valueSource, hits.GetMatchingDocs());
         }
 
         private void Count(ValueSource valueSource, IList<MatchingDocs> matchingDocs)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index b32d430..c1a1da9 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -69,7 +69,7 @@ namespace Lucene.Net.Facet.SortedSet
             dv = state.DocValues;
             counts = new int[state.Size];
             //System.out.println("field=" + field);
-            Count(hits.GetMatchingDocs);
+            Count(hits.GetMatchingDocs());
         }
 
         public override FacetResult GetTopChildren(int topN, string dim, params string[] path)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
index e303394..3511af8 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Facet.Taxonomy
         public FastTaxonomyFacetCounts(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(indexFieldName, taxoReader, config)
         {
-            Count(fc.GetMatchingDocs);
+            Count(fc.GetMatchingDocs());
         }
 
         private void Count(IList<FacetsCollector.MatchingDocs> matchingDocs)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
index 059822c..b86c924 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Facet.Taxonomy
             : base(ordinalsReader.IndexFieldName, taxoReader, config)
         {
             this.ordinalsReader = ordinalsReader;
-            Count(fc.GetMatchingDocs);
+            Count(fc.GetMatchingDocs());
         }
 
         private void Count(IList<FacetsCollector.MatchingDocs> matchingDocs)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
index b943004..e30cef2 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
@@ -50,7 +50,7 @@ namespace Lucene.Net.Facet.Taxonomy
         public TaxonomyFacetSumFloatAssociations(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(indexFieldName, taxoReader, config)
         {
-            SumValues(fc.GetMatchingDocs);
+            SumValues(fc.GetMatchingDocs());
         }
 
         private void SumValues(IList<FacetsCollector.MatchingDocs> matchingDocs)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
index d053dfe..7ff982b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Facet.Taxonomy
         public TaxonomyFacetSumIntAssociations(string indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc)
             : base(indexFieldName, taxoReader, config)
         {
-            SumValues(fc.GetMatchingDocs);
+            SumValues(fc.GetMatchingDocs());
         }
 
         private void SumValues(IList<FacetsCollector.MatchingDocs> matchingDocs)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index 2ae0f49..99dbed1 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Facet.Taxonomy
             : base(ordinalsReader.IndexFieldName, taxoReader, config)
         {
             this.ordinalsReader = ordinalsReader;
-            SumValues(fc.GetMatchingDocs, fc.KeepScores, valueSource);
+            SumValues(fc.GetMatchingDocs(), fc.KeepScores, valueSource);
         }
 
         private sealed class FakeScorer : Scorer

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
index eafdd62..82c0bdd 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
@@ -108,7 +108,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
                 if (ord1 == LabelToOrdinal.INVALID_ORDINAL)
                 {
-                    ord1 = compact.NextOrdinal;
+                    ord1 = compact.GetNextOrdinal();
                     map.AddLabel(label, ord1);
                     compact.AddLabel(label, ord1);
                 }
@@ -224,7 +224,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
                     if (ord1 == LabelToOrdinal.INVALID_ORDINAL)
                     {
-                        ord1 = compact.NextOrdinal;
+                        ord1 = compact.GetNextOrdinal();
                         map.AddLabel(label, ord1);
                         compact.AddLabel(label, ord1);
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/fd13e8e5/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs b/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
index 2969436..b98b283 100644
--- a/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
@@ -75,10 +75,10 @@ namespace Lucene.Net.Facet
             searcher.Search(new TermQuery(new Term("EvenOdd", "NeverMatches")), collectRandomZeroResults);
 
             // There should be no divisions by zero and no null result
-            Assert.NotNull(collectRandomZeroResults.GetMatchingDocs);
+            Assert.NotNull(collectRandomZeroResults.GetMatchingDocs());
 
             // There should be no results at all
-            foreach (MatchingDocs doc in collectRandomZeroResults.GetMatchingDocs)
+            foreach (MatchingDocs doc in collectRandomZeroResults.GetMatchingDocs())
             {
                 Assert.AreEqual(0, doc.TotalHits);
             }


[17/46] lucenenet git commit: .NETify Facet: Field names should be camelCase. Fields should not be public (changed to properties with PascalCase names).

Posted by sy...@apache.org.
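
A small sketch of the renaming pattern this commit applies (the member names mirror OrdAndValue from the diff below, but the class itself is illustrative): public instance fields become PascalCase auto-properties, and private fields keep camelCase without the *_Renamed suffix.

    // Illustrative before/after of the .NETify pattern.
    public class OrdAndValueSketch
    {
        // Before: public int ord;  public float value;   (public camelCase fields)
        // After:  PascalCase auto-properties.
        public int Ord { get; set; }
        public float Value { get; set; }
    }

    // Private fields drop the suffix, e.g.
    // private int capacity_Renamed;  -->  private int capacity;
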
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index 9e8c1ad..277a166 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -29,9 +29,9 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
     /// </summary>
     public class CollisionMap
     {
-        private int capacity_Renamed;
+        private int capacity;
         private float loadFactor;
-        private int size_Renamed;
+        private int size;
         private int threshold;
 
         internal class Entry
@@ -68,17 +68,17 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         {
             this.labelRepository = labelRepository;
             this.loadFactor = loadFactor;
-            this.capacity_Renamed = CompactLabelToOrdinal.DetermineCapacity(2, initialCapacity);
+            this.capacity = CompactLabelToOrdinal.DetermineCapacity(2, initialCapacity);
 
-            this.entries = new Entry[this.capacity_Renamed];
-            this.threshold = (int)(this.capacity_Renamed * this.loadFactor);
+            this.entries = new Entry[this.capacity];
+            this.threshold = (int)(this.capacity * this.loadFactor);
         }
 
         /// <summary>
         /// How many mappings. </summary>
         public virtual int Size()
         {
-            return this.size_Renamed;
+            return this.size;
         }
 
         /// <summary>
@@ -86,12 +86,12 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// </summary>
         public virtual int Capacity()
         {
-            return this.capacity_Renamed;
+            return this.capacity;
         }
 
         private void Grow()
         {
-            int newCapacity = this.capacity_Renamed * 2;
+            int newCapacity = this.capacity * 2;
             Entry[] newEntries = new Entry[newCapacity];
             Entry[] src = this.entries;
 
@@ -113,9 +113,9 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 }
             }
 
-            this.capacity_Renamed = newCapacity;
+            this.capacity = newCapacity;
             this.entries = newEntries;
-            this.threshold = (int)(this.capacity_Renamed * this.loadFactor);
+            this.threshold = (int)(this.capacity * this.loadFactor);
         }
 
         /// <summary>
@@ -125,7 +125,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// </summary>
         public virtual int Get(FacetLabel label, int hash)
         {
-            int bucketIndex = IndexFor(hash, this.capacity_Renamed);
+            int bucketIndex = IndexFor(hash, this.capacity);
             Entry e = this.entries[bucketIndex];
 
             while (e != null && !(hash == e.hash && CategoryPathUtils.EqualsToSerialized(label, labelRepository, e.offset)))
@@ -145,7 +145,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// </summary>
         public virtual int AddLabel(FacetLabel label, int hash, int cid)
         {
-            int bucketIndex = IndexFor(hash, this.capacity_Renamed);
+            int bucketIndex = IndexFor(hash, this.capacity);
             for (Entry e = this.entries[bucketIndex]; e != null; e = e.next)
             {
                 if (e.hash == hash && CategoryPathUtils.EqualsToSerialized(label, labelRepository, e.offset))
@@ -168,7 +168,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// </summary>
         public virtual void AddLabelOffset(int hash, int offset, int cid)
         {
-            int bucketIndex = IndexFor(hash, this.capacity_Renamed);
+            int bucketIndex = IndexFor(hash, this.capacity);
             AddEntry(offset, cid, hash, bucketIndex);
         }
 
@@ -176,7 +176,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         {
             Entry e = this.entries[bucketIndex];
             this.entries[bucketIndex] = new Entry(offset, cid, hash, e);
-            if (this.size_Renamed++ >= this.threshold)
+            if (this.size++ >= this.threshold)
             {
                 Grow();
             }
@@ -184,7 +184,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         internal virtual IEnumerator<CollisionMap.Entry> entryIterator()
         {
-            return new EntryIterator(this, entries, size_Renamed);
+            return new EntryIterator(this, entries, size);
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
index 4b4dad7..9c81f31 100644
--- a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
@@ -31,11 +31,11 @@ namespace Lucene.Net.Facet
         {
             /// <summary>
             /// Ordinal of the entry. </summary>
-            public int ord;
+            public int Ord { get; set; }
 
             /// <summary>
             /// Value associated with the ordinal. </summary>
-            public float value;
+            public float Value { get; set; }
 
             /// <summary>
             /// Default constructor. </summary>
@@ -52,17 +52,17 @@ namespace Lucene.Net.Facet
 
         public override bool LessThan(OrdAndValue a, OrdAndValue b)
         {
-            if (a.value < b.value)
+            if (a.Value < b.Value)
             {
                 return true;
             }
-            else if (a.value > b.value)
+            else if (a.Value > b.Value)
             {
                 return false;
             }
             else
             {
-                return a.ord > b.ord;
+                return a.Ord > b.Ord;
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
index 80246e5..7016e80 100644
--- a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
@@ -31,11 +31,11 @@ namespace Lucene.Net.Facet
         {
             /// <summary>
             /// Ordinal of the entry. </summary>
-            public int Ord;
+            public int Ord { get; set; }
 
             /// <summary>
             /// Value associated with the ordinal. </summary>
-            public int Value;
+            public int Value { get; set; }
 
             /// <summary>
             /// Default constructor. </summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
index ccc5cdf..7c4b3c1 100644
--- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
+++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
@@ -146,7 +146,7 @@ namespace Lucene.Net.Facet
             int i = 0;
             while (i <= labelValues.Length)
             {
-                if (i < labelValues.Length && (double)labelValues[i].value == lastValue)
+                if (i < labelValues.Length && (double)labelValues[i].Value == lastValue)
                 {
                     numInRow++;
                 }
@@ -159,7 +159,7 @@ namespace Lucene.Net.Facet
                     numInRow = 1;
                     if (i < labelValues.Length)
                     {
-                        lastValue = (double)labelValues[i].value;
+                        lastValue = (double)labelValues[i].Value;
                     }
                 }
                 i++;
@@ -177,8 +177,8 @@ namespace Lucene.Net.Facet
 
             public virtual int Compare(LabelAndValue a, LabelAndValue b)
             {
-                Debug.Assert((double)a.value == (double)b.value);
-                return (new BytesRef(a.label)).CompareTo(new BytesRef(b.label));
+                Debug.Assert((double)a.Value == (double)b.Value);
+                return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label));
             }
         }
 
@@ -198,17 +198,17 @@ namespace Lucene.Net.Facet
 
             public virtual int Compare(LabelAndValue a, LabelAndValue b)
             {
-                if ((double)a.value > (double)b.value)
+                if ((double)a.Value > (double)b.Value)
                 {
                     return -1;
                 }
-                else if ((double)a.value < (double)b.value)
+                else if ((double)a.Value < (double)b.Value)
                 {
                     return 1;
                 }
                 else
                 {
-                    return (new BytesRef(a.label)).CompareTo(new BytesRef(b.label));
+                    return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label));
                 }
             }
         }
@@ -278,8 +278,8 @@ namespace Lucene.Net.Facet
             Assert.AreEqual(a.LabelValues.Length, b.LabelValues.Length);
             for (int i = 0; i < a.LabelValues.Length; i++)
             {
-                Assert.AreEqual(a.LabelValues[i].label, b.LabelValues[i].label);
-                Assert.AreEqual((float)a.LabelValues[i].value, (float)b.LabelValues[i].value, (float)a.LabelValues[i].value / 1e5);
+                Assert.AreEqual(a.LabelValues[i].Label, b.LabelValues[i].Label);
+                Assert.AreEqual((float)a.LabelValues[i].Value, (float)b.LabelValues[i].Value, (float)a.LabelValues[i].Value / 1e5);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
index e3a2623..d39cefd 100644
--- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
@@ -433,11 +433,11 @@ namespace Lucene.Net.Facet.Range
                         LongRange prevRange = ranges[Random().Next(rangeID)];
                         if (Random().NextBoolean())
                         {
-                            min = prevRange.min;
+                            min = prevRange.Min;
                         }
                         else
                         {
-                            min = prevRange.max;
+                            min = prevRange.Max;
                         }
                     }
                     else
@@ -451,11 +451,11 @@ namespace Lucene.Net.Facet.Range
                         LongRange prevRange = ranges[Random().Next(rangeID)];
                         if (Random().NextBoolean())
                         {
-                            max = prevRange.min;
+                            max = prevRange.Min;
                         }
                         else
                         {
-                            max = prevRange.max;
+                            max = prevRange.Max;
                         }
                     }
                     else
@@ -546,8 +546,8 @@ namespace Lucene.Net.Facet.Range
                         Console.WriteLine("  range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
                     }
                     LabelAndValue subNode = result.LabelValues[rangeID];
-                    Assert.AreEqual("r" + rangeID, subNode.label);
-                    Assert.AreEqual(expectedCounts[rangeID], (int)subNode.value);
+                    Assert.AreEqual("r" + rangeID, subNode.Label);
+                    Assert.AreEqual(expectedCounts[rangeID], (int)subNode.Value);
 
                     LongRange range = ranges[rangeID];
 
@@ -557,11 +557,11 @@ namespace Lucene.Net.Facet.Range
                     {
                         if (Random().NextBoolean())
                         {
-                            ddq.Add("field", NumericRangeFilter.NewLongRange("field", range.min, range.max, range.minInclusive, range.maxInclusive));
+                            ddq.Add("field", NumericRangeFilter.NewLongRange("field", range.Min, range.Max, range.MinInclusive, range.MaxInclusive));
                         }
                         else
                         {
-                            ddq.Add("field", NumericRangeQuery.NewLongRange("field", range.min, range.max, range.minInclusive, range.maxInclusive));
+                            ddq.Add("field", NumericRangeQuery.NewLongRange("field", range.Min, range.Max, range.MinInclusive, range.MaxInclusive));
                         }
                     }
                     else
@@ -751,8 +751,8 @@ namespace Lucene.Net.Facet.Range
                         Console.WriteLine("TEST: verify range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
                     }
                     LabelAndValue subNode = result.LabelValues[rangeID];
-                    Assert.AreEqual("r" + rangeID, subNode.label);
-                    Assert.AreEqual(expectedCounts[rangeID], (int)subNode.value);
+                    Assert.AreEqual("r" + rangeID, subNode.Label);
+                    Assert.AreEqual(expectedCounts[rangeID], (int)subNode.Value);
 
                     DoubleRange range = ranges[rangeID];
 
@@ -937,8 +937,8 @@ namespace Lucene.Net.Facet.Range
                         Console.WriteLine("  range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
                     }
                     LabelAndValue subNode = result.LabelValues[rangeID];
-                    Assert.AreEqual("r" + rangeID, subNode.label);
-                    Assert.AreEqual(expectedCounts[rangeID], (int)subNode.value);
+                    Assert.AreEqual("r" + rangeID, subNode.Label);
+                    Assert.AreEqual(expectedCounts[rangeID], (int)subNode.Value);
 
                     DoubleRange range = ranges[rangeID];
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index f2c0fe7..8c36649 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -203,7 +203,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
                             FacetField ff = NewCategory();
                             doc.Add(ff);
 
-                            FacetLabel label = new FacetLabel(ff.dim, ff.path);
+                            FacetLabel label = new FacetLabel(ff.Dim, ff.Path);
                             // add all prefixes to values
                             int level = label.Length;
                             while (level > 0)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
index 0d02696..79f8b80 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
@@ -175,10 +175,10 @@ namespace Lucene.Net.Facet.Taxonomy
                     {
                         //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                         FacetsCollector sfc = new FacetsCollector();
-                        pair.searcher.Search(new MatchAllDocsQuery(), sfc);
-                        Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
+                        pair.Searcher.Search(new MatchAllDocsQuery(), sfc);
+                        Facets facets = GetTaxonomyFacetCounts(pair.TaxonomyReader, config, sfc);
                         FacetResult result = facets.GetTopChildren(10, "field");
-                        if (pair.searcher.IndexReader.NumDocs > 0)
+                        if (pair.Searcher.IndexReader.NumDocs > 0)
                         {
                             //System.out.println(pair.taxonomyReader.getSize());
                             Assert.True(result.ChildCount > 0);
@@ -283,10 +283,10 @@ namespace Lucene.Net.Facet.Taxonomy
                     {
                         //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                         FacetsCollector sfc = new FacetsCollector();
-                        pair.searcher.Search(new MatchAllDocsQuery(), sfc);
-                        Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
+                        pair.Searcher.Search(new MatchAllDocsQuery(), sfc);
+                        Facets facets = GetTaxonomyFacetCounts(pair.TaxonomyReader, config, sfc);
                         FacetResult result = facets.GetTopChildren(10, "field");
-                        if (pair.searcher.IndexReader.NumDocs > 0)
+                        if (pair.Searcher.IndexReader.NumDocs > 0)
                         {
                             //System.out.println(pair.taxonomyReader.getSize());
                             Assert.True(result.ChildCount > 0);
@@ -365,7 +365,7 @@ namespace Lucene.Net.Facet.Taxonomy
             SearcherAndTaxonomy pair = mgr.Acquire();
             try
             {
-                Assert.AreEqual(1, pair.taxonomyReader.Size);
+                Assert.AreEqual(1, pair.TaxonomyReader.Size);
             }
             finally
             {
@@ -382,7 +382,7 @@ namespace Lucene.Net.Facet.Taxonomy
             pair = mgr.Acquire();
             try
             {
-                Assert.AreEqual(3, pair.taxonomyReader.Size);
+                Assert.AreEqual(3, pair.TaxonomyReader.Size);
             }
             finally
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index 263a1d2..6bee101 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -362,7 +362,7 @@ namespace Lucene.Net.Facet.Taxonomy
 
             FacetResult result = facets.GetTopChildren(10, "a");
             Assert.AreEqual(1, result.LabelValues.Length);
-            Assert.AreEqual(1, (int)result.LabelValues[0].value);
+            Assert.AreEqual(1, (int)result.LabelValues[0].Value);
 
             IOUtils.Close(writer, taxoWriter, searcher.IndexReader, taxoReader, dir, taxoDir);
         }
@@ -502,8 +502,8 @@ namespace Lucene.Net.Facet.Taxonomy
             var allLabels = new HashSet<string>();
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                allLabels.Add(labelValue.label);
-                Assert.AreEqual(1, (int)labelValue.value);
+                allLabels.Add(labelValue.Label);
+                Assert.AreEqual(1, (int)labelValue.Value);
             }
             Assert.AreEqual(numLabels, allLabels.Count);
 
@@ -744,7 +744,7 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(2, result.LabelValues.Length, "wrong number of children");
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(2, (int)labelValue.value, "wrong weight for child " + labelValue.label);
+                Assert.AreEqual(2, (int)labelValue.Value, "wrong weight for child " + labelValue.Label);
             }
 
             IOUtils.Close(indexReader, taxoReader, indexDir, taxoDir);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
index 6f4cb8b..09d1119 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
@@ -126,7 +126,7 @@ namespace Lucene.Net.Facet.Taxonomy
             foreach (FacetField ff in docCategories)
             {
                 doc.Add(ff);
-                string cp = ff.dim + "/" + ff.path[0];
+                string cp = ff.Dim + "/" + ff.Path[0];
                 allExpectedCounts[cp] = allExpectedCounts[cp] + 1;
                 if (updateTermExpectedCounts)
                 {
@@ -228,19 +228,19 @@ namespace Lucene.Net.Facet.Taxonomy
             counts[CP_D] = 0;
             foreach (FacetField ff in CATEGORIES_A)
             {
-                counts[ff.dim + "/" + ff.path[0]] = 0;
+                counts[ff.Dim + "/" + ff.Path[0]] = 0;
             }
             foreach (FacetField ff in CATEGORIES_B)
             {
-                counts[ff.dim + "/" + ff.path[0]] = 0;
+                counts[ff.Dim + "/" + ff.Path[0]] = 0;
             }
             foreach (FacetField ff in CATEGORIES_C)
             {
-                counts[ff.dim + "/" + ff.path[0]] = 0;
+                counts[ff.Dim + "/" + ff.Path[0]] = 0;
             }
             foreach (FacetField ff in CATEGORIES_D)
             {
-                counts[ff.dim + "/" + ff.path[0]] = 0;
+                counts[ff.Dim + "/" + ff.Path[0]] = 0;
             }
             return counts;
         }
@@ -296,13 +296,13 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(-1, (int)result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(termExpectedCounts[CP_A + "/" + labelValue.label], labelValue.value);
+                Assert.AreEqual(termExpectedCounts[CP_A + "/" + labelValue.Label], labelValue.Value);
             }
             result = facets.GetTopChildren(NUM_CHILDREN_CP_B, CP_B);
             Assert.AreEqual(termExpectedCounts[CP_B], result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(termExpectedCounts[CP_B + "/" + labelValue.label], labelValue.value);
+                Assert.AreEqual(termExpectedCounts[CP_B + "/" + labelValue.Label], labelValue.Value);
             }
 
             IOUtils.Close(indexReader, taxoReader);
@@ -325,9 +325,9 @@ namespace Lucene.Net.Facet.Taxonomy
             int prevValue = int.MaxValue;
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(allExpectedCounts[CP_A + "/" + labelValue.label], labelValue.value);
-                Assert.True((int)labelValue.value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.value + " prevValue=" + prevValue);
-                prevValue = (int)labelValue.value;
+                Assert.AreEqual(allExpectedCounts[CP_A + "/" + labelValue.Label], labelValue.Value);
+                Assert.True((int)labelValue.Value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" + prevValue);
+                prevValue = (int)labelValue.Value;
             }
 
             result = facets.GetTopChildren(NUM_CHILDREN_CP_B, CP_B);
@@ -335,9 +335,9 @@ namespace Lucene.Net.Facet.Taxonomy
             prevValue = int.MaxValue;
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(allExpectedCounts[CP_B + "/" + labelValue.label], labelValue.value);
-                Assert.True((int)labelValue.value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.value + " prevValue=" + prevValue);
-                prevValue = (int)labelValue.value;
+                Assert.AreEqual(allExpectedCounts[CP_B + "/" + labelValue.Label], labelValue.Value);
+                Assert.True((int)labelValue.Value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" + prevValue);
+                prevValue = (int)labelValue.Value;
             }
 
             IOUtils.Close(indexReader, taxoReader);
@@ -359,13 +359,13 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(-1, (int)result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(allExpectedCounts[CP_A + "/" + labelValue.label], labelValue.value);
+                Assert.AreEqual(allExpectedCounts[CP_A + "/" + labelValue.Label], labelValue.Value);
             }
             result = facets.GetTopChildren(int.MaxValue, CP_B);
             Assert.AreEqual(allExpectedCounts[CP_B], result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(allExpectedCounts[CP_B + "/" + labelValue.label], labelValue.value);
+                Assert.AreEqual(allExpectedCounts[CP_B + "/" + labelValue.Label], labelValue.Value);
             }
 
             IOUtils.Close(indexReader, taxoReader);
@@ -387,13 +387,13 @@ namespace Lucene.Net.Facet.Taxonomy
             Assert.AreEqual(allExpectedCounts[CP_C], result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(allExpectedCounts[CP_C + "/" + labelValue.label], labelValue.value);
+                Assert.AreEqual(allExpectedCounts[CP_C + "/" + labelValue.Label], labelValue.Value);
             }
             result = facets.GetTopChildren(NUM_CHILDREN_CP_D, CP_D);
             Assert.AreEqual(allExpectedCounts[CP_C], result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
-                Assert.AreEqual(allExpectedCounts[CP_D + "/" + labelValue.label], labelValue.value);
+                Assert.AreEqual(allExpectedCounts[CP_D + "/" + labelValue.Label], labelValue.Value);
             }
 
             IOUtils.Close(indexReader, taxoReader);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index 719e212..e32693d 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -1213,10 +1213,10 @@ namespace Lucene.Net.Facet
                 {
                     foreach (LabelAndValue labelValue in fr.LabelValues)
                     {
-                        actualValues[labelValue.label] = (int)labelValue.value;
+                        actualValues[labelValue.Label] = (int)labelValue.Value;
                         if (VERBOSE)
                         {
-                            Console.WriteLine("        " + idx + ": " + new BytesRef(labelValue.label) + ": " + labelValue.value);
+                            Console.WriteLine("        " + idx + ": " + new BytesRef(labelValue.Label) + ": " + labelValue.Value);
                             idx++;
                         }
                     }
@@ -1254,12 +1254,12 @@ namespace Lucene.Net.Facet
                     for (int i = 0; i < topNIDs.Length; i++)
                     {
                         int expectedOrd = topNIDs[i];
-                        Assert.AreEqual(expected.Counts[dim][expectedOrd], (int)fr.LabelValues[i].value);
+                        Assert.AreEqual(expected.Counts[dim][expectedOrd], (int)fr.LabelValues[i].Value);
                         if (isSortedSetDV)
                         {
                             // Tie-break facet labels are only in unicode
                             // order with SortedSetDVFacets:
-                            assertEquals("value @ idx=" + i, dimValues[dim][expectedOrd], fr.LabelValues[i].label);
+                            assertEquals("value @ idx=" + i, dimValues[dim][expectedOrd], fr.LabelValues[i].Label);
                         }
                     }
                 }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs b/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
index 9c14f79..73a4123 100644
--- a/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
+++ b/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
@@ -90,7 +90,7 @@ namespace Lucene.Net.Facet
             Facets facets = GetTaxonomyFacetCounts(taxoReader, facetsConfig, fc);
             FacetResult res = facets.GetTopChildren(10, "a");
             Assert.AreEqual(1, res.LabelValues.Length);
-            Assert.AreEqual(2, res.LabelValues[0].value);
+            Assert.AreEqual(2, res.LabelValues[0].Value);
             IOUtils.Close(indexReader, taxoReader);
 
             IOUtils.Close(indexDir, taxoDir);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e0a73b45/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs b/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
index 7b5915b..2969436 100644
--- a/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
@@ -80,7 +80,7 @@ namespace Lucene.Net.Facet
             // There should be no results at all
             foreach (MatchingDocs doc in collectRandomZeroResults.GetMatchingDocs)
             {
-                Assert.AreEqual(0, doc.totalHits);
+                Assert.AreEqual(0, doc.TotalHits);
             }
 
             // Now start searching and retrieve results.
@@ -125,14 +125,14 @@ namespace Lucene.Net.Facet
             int sum = 0;
             foreach (LabelAndValue lav in random10Result.LabelValues)
             {
-                sum += (int)lav.value;
+                sum += (int)lav.Value;
             }
             float mu = (float)sum / (float)maxNumChildren;
 
             float variance = 0;
             foreach (LabelAndValue lav in random10Result.LabelValues)
             {
-                variance += (float)Math.Pow((mu - (int)lav.value), 2);
+                variance += (float)Math.Pow((mu - (int)lav.Value), 2);
             }
             variance = variance / maxNumChildren;
             float sigma = (float)Math.Sqrt(variance);


[03/46] lucenenet git commit: Finished implementation of Facet.Taxonomy.WriterCache.TestCharBlockArray test and fixed the implementation of CharBlockArray and CompactLabelToOrdinal to make the test pass.

Posted by sy...@apache.org.
Finished implementation of Facet.Taxonomy.WriterCache.TestCharBlockArray test and fixed the implementation of CharBlockArray and CompactLabelToOrdinal to make the test pass.
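
As a usage note (not part of the commit), below is a minimal sketch of the stream-based round trip that the new CharBlockArray.Flush(Stream)/Open(Stream) signatures enable. It assumes a caller with internal access (for example the Lucene.Net.Tests.Facet assembly, via the InternalsVisibleTo attribute added in this commit), and the temp file name is hypothetical:

    using System.IO;
    using Lucene.Net.Facet.Taxonomy.WriterCache;

    var array = new CharBlockArray();
    array.Append("hello world");

    // Write the array out; Flush(Stream) delegates to StreamUtils.SerializeToStream.
    using (Stream @out = new FileStream("chars.bin", FileMode.Create, FileAccess.Write))
    {
        array.Flush(@out);
    }

    // Read it back; Open(Stream) delegates to StreamUtils.DeserializeFromStream.
    using (Stream @in = new FileStream("chars.bin", FileMode.Open, FileAccess.Read))
    {
        array = CharBlockArray.Open(@in);
    }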


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/861aa737
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/861aa737
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/861aa737

Branch: refs/heads/master
Commit: 861aa7377a915465fac212c8db66b1a76996b28e
Parents: bcbfad5
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Fri Sep 23 23:38:42 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:36 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Support/StreamUtils.cs      | 11 ++--
 src/Lucene.Net.Facet/Properties/AssemblyInfo.cs |  3 +
 .../Taxonomy/WriterCache/CharBlockArray.cs      | 18 +++---
 .../WriterCache/CompactLabelToOrdinal.cs        | 26 +++------
 .../Taxonomy/WriterCache/TestCharBlockArray.cs  | 58 +++++++++-----------
 5 files changed, 48 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/861aa737/src/Lucene.Net.Core/Support/StreamUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/StreamUtils.cs b/src/Lucene.Net.Core/Support/StreamUtils.cs
index 75e7d15..7e05717 100644
--- a/src/Lucene.Net.Core/Support/StreamUtils.cs
+++ b/src/Lucene.Net.Core/Support/StreamUtils.cs
@@ -13,13 +13,12 @@ namespace Lucene.Net.Support
     {
         static readonly BinaryFormatter Formatter = new BinaryFormatter();
 
-        public static MemoryStream SerializeToStream(object o)
+        public static void SerializeToStream(object o, Stream outputStream)
         {
-            using (var stream = new MemoryStream())
-            {
-                Formatter.Serialize(stream, o);
-                return stream;
-            }
+            // LUCENENET TODO: It would probably be better to serialize to
+            // XML so this works across .NET framework versions or alternatively
+            // find/create an alternative binary formatter implementation that works that way.
+            Formatter.Serialize(outputStream, o);
         }
 
         public static object DeserializeFromStream(Stream stream)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/861aa737/src/Lucene.Net.Facet/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Properties/AssemblyInfo.cs b/src/Lucene.Net.Facet/Properties/AssemblyInfo.cs
index 741693d..7325e4c 100644
--- a/src/Lucene.Net.Facet/Properties/AssemblyInfo.cs
+++ b/src/Lucene.Net.Facet/Properties/AssemblyInfo.cs
@@ -22,6 +22,9 @@ using System.Runtime.InteropServices;
 // The following GUID is for the ID of the typelib if this project is exposed to COM
 [assembly: Guid("8dd11ab2-c5b3-4691-99da-2941f27e0e10")]
 
+// for testing
+[assembly: InternalsVisibleTo("Lucene.Net.Tests.Facet")]
+
 // Version information for an assembly consists of the following four values:
 //
 //      Major Version

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/861aa737/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index a6a60e7..a38329b 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -54,7 +54,10 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
             public object Clone()
             {
-                throw new NotImplementedException();
+                var clone = new Block(chars.Length);
+                clone.length = length;
+                Array.Copy(chars, clone.chars, chars.Length);
+                return clone;
             }
         }
 
@@ -214,21 +217,14 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
             return sb.ToString();
         }
 
-        internal virtual void Flush(OutputStreamDataOutput @out)
+        internal virtual void Flush(Stream @out)
         {
-            
-            using (var ms = StreamUtils.SerializeToStream(this))
-            {
-                var bytes = ms.ToArray();
-                @out.WriteBytes(bytes, 0, bytes.Length);
-            }
+            StreamUtils.SerializeToStream(this, @out);
         }
 
-        public static CharBlockArray Open(BinaryReader @in)
+        public static CharBlockArray Open(Stream @in)
         {
             return StreamUtils.DeserializeFromStream(@in) as CharBlockArray;
         }
-
     }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/861aa737/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 07bd162..f717fb1 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -396,7 +396,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
         /// Opens the file and reloads the CompactLabelToOrdinal. The file it expects
         /// is generated from the <seealso cref="#flush(File)"/> command.
         /// </summary>
-        public static CompactLabelToOrdinal Open(string file, float loadFactor, int numHashArrays)
+        internal static CompactLabelToOrdinal Open(string file, float loadFactor, int numHashArrays)
         {
             /// <summary>
             /// Part of the file is the labelRepository, which needs to be rehashed
@@ -422,7 +422,7 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 l2o.Init();
 
                 // now read the chars
-                l2o.labelRepository = CharBlockArray.Open(dis);
+                l2o.labelRepository = CharBlockArray.Open(dis.BaseStream);
 
                 l2o.collisionMap = new CollisionMap(l2o.labelRepository);
 
@@ -443,13 +443,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                     // identical code to CategoryPath.hashFromSerialized. since we need to
                     // advance offset, we cannot call the method directly. perhaps if we
                     // could pass a mutable Integer or something...
-                    int length = (short)l2o.labelRepository.CharAt(offset++);
+                    int length = (ushort)l2o.labelRepository.CharAt(offset++);
                     int hash = length;
                     if (length != 0)
                     {
                         for (int i = 0; i < length; i++)
                         {
-                            int len = (short)l2o.labelRepository.CharAt(offset++);
+                            int len = (ushort)l2o.labelRepository.CharAt(offset++);
                             hash = hash * 31 + l2o.labelRepository.SubSequence(offset, offset + len).GetHashCode();
                             offset += len;
                         }
@@ -482,24 +482,14 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
 
         }
 
-        public virtual void Flush(Stream stream)
+        internal virtual void Flush(FileStream stream)
         {
-
-            OutputStreamDataOutput dos = new OutputStreamDataOutput(stream);
-
-            try
+            using (BinaryWriter dos = new BinaryWriter(stream))
             {
-                dos.WriteInt(this.counter);
+                dos.Write(this.counter);
 
                 // write the labelRepository
-                this.labelRepository.Flush(dos);
-                // Closes the data output stream
-                dos.Dispose();
-
-            }
-            finally
-            {
-                dos.Dispose();
+                this.labelRepository.Flush(dos.BaseStream);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/861aa737/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
index f4be988..f059a79 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
@@ -1,13 +1,9 @@
-\ufeffusing System.Text;
-using Lucene.Net.Support;
-using NUnit.Framework;
+\ufeffusing NUnit.Framework;
+using System.IO;
+using System.Text;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-
-
-    using TestUtil = Lucene.Net.Util.TestUtil;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -24,14 +20,13 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
+
     [TestFixture]
     public class TestCharBlockArray : FacetTestCase
     {
 
-        /* not finished yet because of missing charset decoder */
-
-        /*
-        public virtual void testArray()
+        [Test]
+        public virtual void TestArray()
         {
             CharBlockArray array = new CharBlockArray();
             StringBuilder builder = new StringBuilder();
@@ -47,9 +42,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 // This test is turning random bytes into a string,
                 // this is asking for trouble.
 
-                CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onUnmappableCharacter(CodingErrorAction.REPLACE).onMalformedInput(CodingErrorAction.REPLACE);
-                string s = decoder.Decode(ByteBuffer.Wrap(buffer, 0, size)).ToString();
-                array.append(s);
+                string s = Encoding.UTF8.GetString(buffer, 0, size);
+                array.Append(s);
                 builder.Append(s);
             }
 
@@ -59,9 +53,8 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 int size = 1 + Random().Next(50);
                 // This test is turning random bytes into a string,
                 // this is asking for trouble.
-                CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onUnmappableCharacter(CodingErrorAction.REPLACE).onMalformedInput(CodingErrorAction.REPLACE);
-                string s = decoder.decode(ByteBuffer.Wrap(buffer, 0, size)).ToString();
-                array.append((CharSequence)s);
+                string s = Encoding.UTF8.GetString(buffer, 0, size);
+                array.Append(s);
                 builder.Append(s);
             }
 
@@ -71,29 +64,30 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 int size = 1 + Random().Next(50);
                 // This test is turning random bytes into a string,
                 // this is asking for trouble.
-                CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onUnmappableCharacter(CodingErrorAction.REPLACE).onMalformedInput(CodingErrorAction.REPLACE);
-                string s = decoder.decode(ByteBuffer.Wrap(buffer, 0, size)).ToString();
+                string s = Encoding.UTF8.GetString(buffer, 0, size);
                 for (int j = 0; j < s.Length; j++)
                 {
-                    array.append(s[j]);
+                    array.Append(s[j]);
                 }
                 builder.Append(s);
             }
 
             AssertEqualsInternal("GrowingCharArray<->StringBuilder mismatch.", builder, array);
 
-            File tempDir = CreateTempDir("growingchararray");
-            File f = new File(tempDir, "GrowingCharArrayTest.tmp");
-            BufferedOutputStream @out = new BufferedOutputStream(new FileOutputStream(f));
-            array.flush(@out);
-            @out.flush();
-            @out.Close();
+            DirectoryInfo tempDir = CreateTempDir("growingchararray");
+            FileInfo f = new FileInfo(Path.Combine(tempDir.FullName, "GrowingCharArrayTest.tmp"));
+            using (Stream @out = new FileStream(f.FullName, FileMode.OpenOrCreate, FileAccess.Write))
+            {
+                array.Flush(@out);
+                @out.Flush();
+            }
 
-            BufferedInputStream @in = new BufferedInputStream(new FileInputStream(f));
-            array = CharBlockArray.open(@in);
-            AssertEqualsInternal("GrowingCharArray<->StringBuilder mismatch after flush/load.", builder, array);
-            @in.Close();
-            f.delete();
+            using (Stream @in = new FileStream(f.FullName, FileMode.Open, FileAccess.Read))
+            {
+                array = CharBlockArray.Open(@in);
+                AssertEqualsInternal("GrowingCharArray<->StringBuilder mismatch after flush/load.", builder, array);
+            }
+            f.Delete();
         }
 
         private static void AssertEqualsInternal(string msg, StringBuilder expected, CharBlockArray actual)
@@ -104,7 +98,5 @@ namespace Lucene.Net.Facet.Taxonomy.WriterCache
                 Assert.AreEqual(expected[i], actual.CharAt(i), msg);
             }
         }
-        */
     }
-
 }
\ No newline at end of file


[09/46] lucenenet git commit: Fixed Facet.FacetTestCase.SortTies() to include a sort length rather than end index.

Posted by sy...@apache.org.
Fixed Facet.FacetTestCase.SortTies() to include a sort length rather than end index.
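
For context, Java's Arrays.sort(array, fromIndex, toIndex) takes an exclusive end index, while .NET's Array.Sort(array, index, length) takes a count; that mismatch is what this commit corrects. A minimal illustration (not from the commit):

    using System;

    int[] a = { 5, 4, 3, 2, 1 };
    // .NET: sorts the 3 elements a[1]..a[3], giving { 5, 2, 3, 4, 1 }
    Array.Sort(a, 1, 3);
    // Java equivalent: Arrays.sort(a, 1, 4) -- toIndex 4 is exclusive,
    // so a direct port of sort(a, from, to) must pass (a, from, to - from).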


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e9302a87
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e9302a87
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e9302a87

Branch: refs/heads/master
Commit: e9302a877437b8a5197756201a16d3a907301504
Parents: 44f7d9c
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 24 20:30:05 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Oct 3 23:30:52 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Tests.Facet/FacetTestCase.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e9302a87/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
index 0c71f32..ccc5cdf 100644
--- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
+++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
@@ -154,7 +154,7 @@ namespace Lucene.Net.Facet
                 {
                     if (numInRow > 1)
                     {
-                        Array.Sort(labelValues, i - numInRow, i, new ComparatorAnonymousInnerClassHelper(this));
+                        Array.Sort(labelValues, i - numInRow, i - (i - numInRow), new ComparatorAnonymousInnerClassHelper(this));
                     }
                     numInRow = 1;
                     if (i < labelValues.Length)