Posted to commits@lucenenet.apache.org by mh...@apache.org on 2013/09/24 20:32:57 UTC

[21/50] [abbrv] git commit: Massive cleanup, reducing compiler errors

Massive cleanup, reducing compiler errors


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/80561f72
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/80561f72
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/80561f72

Branch: refs/heads/branch_4x
Commit: 80561f72bcad93c9cd46011a0d3efb2031db996b
Parents: fc17ce3
Author: Paul Irwin <pa...@gmail.com>
Authored: Tue Jul 23 20:15:43 2013 -0400
Committer: Paul Irwin <pa...@gmail.com>
Committed: Tue Jul 23 20:15:43 2013 -0400

----------------------------------------------------------------------
 src/core/Analysis/TokenFilter.cs                |    2 +-
 src/core/Analysis/TokenStream.cs                |    4 +-
 src/core/Analysis/TokenStreamToAutomaton.cs     |    2 +-
 src/core/Codecs/BlockTreeTermsWriter.cs         |    2 +-
 .../CompressingStoredFieldsFormat.cs            |   43 +-
 .../CompressingStoredFieldsIndexReader.cs       |  209 +--
 .../CompressingStoredFieldsIndexWriter.cs       |   50 +-
 .../CompressingStoredFieldsReader.cs            |  162 +--
 .../CompressingStoredFieldsWriter.cs            |  440 +++---
 .../Compressing/CompressingTermVectorsFormat.cs |   34 +-
 .../Compressing/CompressingTermVectorsReader.cs | 1352 +++++++++++++-----
 .../Compressing/CompressingTermVectorsWriter.cs |  933 ++++++------
 src/core/Codecs/Compressing/CompressionMode.cs  |   12 +-
 src/core/Codecs/FieldsProducer.cs               |    6 +-
 .../Lucene3x/Lucene3xStoredFieldsReader.cs      |    2 +-
 src/core/Codecs/Lucene3x/SegmentTermDocs.cs     |    2 +-
 src/core/Codecs/MultiLevelSkipListReader.cs     |    2 +-
 src/core/Codecs/PostingsWriterBase.cs           |    8 +-
 src/core/Document/Document.cs                   |   10 +
 src/core/Document/Field.cs                      |    8 +-
 src/core/Document/FieldType.cs                  |    2 +-
 src/core/Index/AtomicReader.cs                  |    2 +-
 src/core/Index/AtomicReaderContext.cs           |    2 +-
 src/core/Index/BaseCompositeReader.cs           |    7 +-
 src/core/Index/BinaryDocValuesWriter.cs         |    6 +-
 src/core/Index/ByteSliceReader.cs               |   14 +-
 src/core/Index/ByteSliceWriter.cs               |   28 +-
 src/core/Index/CompositeReader.cs               |   22 +-
 src/core/Index/CompositeReaderContext.cs        |    2 +-
 src/core/Index/DocValuesProcessor.cs            |    2 +-
 src/core/Index/DocumentsWriter.cs               |    2 +-
 src/core/Index/FilterDirectoryReader.cs         |    2 +-
 src/core/Index/FreqProxTermsWriterPerField.cs   |   10 +-
 src/core/Index/IIndexableField.cs               |    2 +-
 src/core/Index/IndexWriter.cs                   |    2 +-
 src/core/Index/IndexWriterConfig.cs             |   12 +-
 .../Index/KeepOnlyLastCommitDeletionPolicy.cs   |    4 +-
 src/core/Index/LogMergePolicy.cs                |    2 +-
 src/core/Index/MultiDocValues.cs                |   20 +-
 src/core/Index/MultiReader.cs                   |    2 +-
 src/core/Index/NormsConsumerPerField.cs         |    4 +
 src/core/Index/ParallelCompositeReader.cs       |    4 +-
 src/core/Index/SegmentInfos.cs                  |    2 +-
 src/core/Index/SlowCompositeReaderWrapper.cs    |   48 +-
 src/core/Index/StandardDirectoryReader.cs       |   10 +-
 src/core/Index/TermVectorsConsumerPerField.cs   |   14 +-
 src/core/Index/TermsEnum.cs                     |    4 +-
 src/core/Index/TermsHash.cs                     |    2 +-
 src/core/Index/TermsHashPerField.cs             |    8 +-
 src/core/Lucene.Net.csproj                      |    1 +
 src/core/Search/AutomatonQuery.cs               |    2 +-
 src/core/Search/BitsFilteredDocIdSet.cs         |    2 +-
 src/core/Search/ConstantScoreAutoRewrite.cs     |   11 +-
 src/core/Search/DocTermOrdsRangeFilter.cs       |    6 +-
 src/core/Search/FieldCacheRangeFilter.cs        |   34 +-
 src/core/Search/FieldComparator.cs              |   26 +-
 src/core/Search/FieldValueHitQueue.cs           |   16 +-
 src/core/Search/NumericRangeQuery.cs            |    6 +-
 src/core/Search/Payloads/PayloadNearQuery.cs    |  361 ++---
 src/core/Search/Payloads/PayloadSpanUtil.cs     |    4 +-
 src/core/Search/PhraseQuery.cs                  |    9 +-
 src/core/Search/PositiveScoresOnlyCollector.cs  |   82 +-
 src/core/Search/PrefixQuery.cs                  |    2 +-
 src/core/Search/PrefixTermsEnum.cs              |    2 +-
 src/core/Search/ReqExclScorer.cs                |   14 +-
 src/core/Search/ReqOptSumScorer.cs              |  140 +-
 src/core/Search/ScoreCachingWrappingScorer.cs   |   18 +-
 src/core/Search/ScoringRewrite.cs               |    7 +-
 src/core/Search/Similarities/BM25Similarity.cs  |   65 +-
 src/core/Search/Similarities/BasicStats.cs      |    7 +-
 src/core/Search/Similarities/MultiSimilarity.cs |    9 +-
 .../Similarities/PerFieldSimilarityWrapper.cs   |    7 +-
 src/core/Search/Similarities/TFIDFSimilarity.cs |    9 +-
 src/core/Search/Spans/FieldMaskingSpanQuery.cs  |    2 +-
 src/core/Search/Spans/NearSpansOrdered.cs       |  785 +++++-----
 src/core/Search/Spans/NearSpansUnordered.cs     |  783 +++++-----
 src/core/Search/Spans/SpanFirstQuery.cs         |    7 +-
 .../Search/Spans/SpanMultiTermQueryWrapper.cs   |   30 +-
 .../Search/Spans/SpanNearPayloadCheckQuery.cs   |    4 +-
 src/core/Search/Spans/SpanNearQuery.cs          |    4 +-
 src/core/Search/Spans/SpanNotQuery.cs           |   15 +-
 src/core/Search/Spans/SpanOrQuery.cs            |   17 +-
 src/core/Search/Spans/SpanPayloadCheckQuery.cs  |    8 +-
 src/core/Search/Spans/SpanPositionCheckQuery.cs |   27 +-
 src/core/Search/Spans/SpanPositionRangeQuery.cs |   34 +-
 src/core/Search/Spans/SpanQuery.cs              |    2 +-
 src/core/Search/Spans/SpanScorer.cs             |   11 +-
 src/core/Search/Spans/SpanTermQuery.cs          |    6 +-
 src/core/Search/Spans/SpanWeight.cs             |    7 +-
 src/core/Search/Spans/Spans.cs                  |    3 +-
 src/core/Search/Spans/TermSpans.cs              |    2 +-
 src/core/Search/TermCollectingRewrite.cs        |    4 +-
 src/core/Search/TopFieldCollector.cs            |    2 +-
 src/core/Support/ByteBuffer.cs                  |   20 +-
 src/core/Util/Automaton/State.cs                |    5 +
 src/core/Util/ByteBlockPool.cs                  |    4 +-
 src/core/Util/DocIdBitSet.cs                    |   14 +-
 src/core/Util/Fst/BytesStore.cs                 |    6 +-
 src/core/Util/Fst/FSTEnum.cs                    |    3 +-
 src/core/Util/Fst/ForwardBytesReader.cs         |    4 +-
 src/core/Util/Fst/ReverseBytesReader.cs         |    4 +-
 src/core/Util/NamedThreadFactory.cs             |    2 +-
 src/core/Util/OpenBitSetIterator.cs             |   14 +-
 src/core/Util/Packed/AppendingLongBuffer.cs     |    6 +-
 src/core/Util/Packed/BlockPackedWriter.cs       |    2 +-
 .../Util/Packed/MonotonicAppendingLongBuffer.cs |    6 +-
 src/core/Util/Packed/Packed64SingleBlock.cs     |   17 +-
 src/core/Util/RollingBuffer.cs                  |    2 +-
 108 files changed, 3637 insertions(+), 2589 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Analysis/TokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/core/Analysis/TokenFilter.cs b/src/core/Analysis/TokenFilter.cs
index b1b9435..f818ff6 100644
--- a/src/core/Analysis/TokenFilter.cs
+++ b/src/core/Analysis/TokenFilter.cs
@@ -56,7 +56,7 @@ namespace Lucene.Net.Analysis
             {
                 if (input != null)
                 {
-                    input.Close();
+                    input.Dispose();
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Analysis/TokenStream.cs
----------------------------------------------------------------------
diff --git a/src/core/Analysis/TokenStream.cs b/src/core/Analysis/TokenStream.cs
index 430e28f..518774b 100644
--- a/src/core/Analysis/TokenStream.cs
+++ b/src/core/Analysis/TokenStream.cs
@@ -150,6 +150,8 @@ namespace Lucene.Net.Analysis
             Dispose(true);
         }
 
-	    protected abstract void Dispose(bool disposing);
+        protected virtual void Dispose(bool disposing)
+        {
+        }
 	}
 }
\ No newline at end of file
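
Taken together, the two Analysis changes above apply the same porting convention: Java's Closeable.close() maps to .NET's IDisposable.Dispose(), with a virtual Dispose(bool) hook that subclasses override to release their own resources. A minimal sketch of the pattern as these classes use it (the class and field names here are illustrative, not the actual Lucene.Net types):

    using System;

    public class WrappingStream : IDisposable
    {
        private readonly IDisposable input; // wrapped resource, as in TokenFilter

        public WrappingStream(IDisposable input)
        {
            this.input = input;
        }

        // Public entry point; mirrors TokenStream.Dispose() above.
        public void Dispose()
        {
            Dispose(true);
        }

        // Virtual hook, as TokenStream now declares it; the body shows the
        // TokenFilter-style cleanup of the wrapped input.
        protected virtual void Dispose(bool disposing)
        {
            if (disposing && input != null)
            {
                input.Dispose(); // replaces the Java-era input.Close()
            }
        }
    }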

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Analysis/TokenStreamToAutomaton.cs
----------------------------------------------------------------------
diff --git a/src/core/Analysis/TokenStreamToAutomaton.cs b/src/core/Analysis/TokenStreamToAutomaton.cs
index 9646a7c..8264f75 100644
--- a/src/core/Analysis/TokenStreamToAutomaton.cs
+++ b/src/core/Analysis/TokenStreamToAutomaton.cs
@@ -22,7 +22,7 @@ namespace Lucene.Net.Analysis
             // Any tokens that start at our position leave from this state:
             internal State leaving;
 
-            public override void Reset()
+            public void Reset()
             {
                 arriving = null;
                 leaving = null;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/BlockTreeTermsWriter.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/BlockTreeTermsWriter.cs b/src/core/Codecs/BlockTreeTermsWriter.cs
index 2a0012f..14d5444 100644
--- a/src/core/Codecs/BlockTreeTermsWriter.cs
+++ b/src/core/Codecs/BlockTreeTermsWriter.cs
@@ -310,7 +310,7 @@ namespace Lucene.Net.Codecs
                     //if (DEBUG) {
                     //  System.out.println("      add sub=" + indexEnt.input + " " + indexEnt.input + " output=" + indexEnt.output);
                     //}
-                    builder.Add(Util.ToIntsRef(indexEnt.Input, scratchIntsRef), indexEnt.Output);
+                    builder.Add(Lucene.Net.Util.Fst.Util.ToIntsRef(indexEnt.Input, scratchIntsRef), indexEnt.Output);
                 }
             }
         }
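
The fully qualified call above works around a name collision: inside the Lucene.Net.Codecs namespace, the bare identifier Util presumably binds to the Lucene.Net.Util namespace rather than to the Lucene.Net.Util.Fst.Util helper class, so the unqualified Util.ToIntsRef no longer compiles. A using alias would be an equivalent, shorter fix (a sketch only, not what the commit does):

    // At the top of BlockTreeTermsWriter.cs:
    using FstUtil = Lucene.Net.Util.Fst.Util; // alias sidesteps the clash with the Lucene.Net.Util namespace

    // ...then inside the loop:
    builder.Add(FstUtil.ToIntsRef(indexEnt.Input, scratchIntsRef), indexEnt.Output);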

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/Compressing/CompressingStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/Compressing/CompressingStoredFieldsFormat.cs b/src/core/Codecs/Compressing/CompressingStoredFieldsFormat.cs
index 991daee..b6725c7 100644
--- a/src/core/Codecs/Compressing/CompressingStoredFieldsFormat.cs
+++ b/src/core/Codecs/Compressing/CompressingStoredFieldsFormat.cs
@@ -37,11 +37,10 @@ namespace Lucene.Net.Codecs.Compressing
 {
     public class CompressingStoredFieldsFormat : StoredFieldsFormat
     {
-
-        private string _formatName;
-        private string _segmentSuffix;
-        private CompressionMode _compressionMode;
-        private int _chunkSize;
+        private readonly string formatName;
+        private readonly string segmentSuffix;
+        private readonly CompressionMode compressionMode;
+        private readonly int chunkSize;
 
         /**
          * Create a new {@link CompressingStoredFieldsFormat} with an empty segment 
@@ -50,10 +49,8 @@ namespace Lucene.Net.Codecs.Compressing
          * @see CompressingStoredFieldsFormat#CompressingStoredFieldsFormat(String, String, CompressionMode, int)
          */
         public CompressingStoredFieldsFormat(string formatName, CompressionMode compressionMode, int chunkSize)
+            : this(formatName, "", compressionMode, chunkSize)
         {
-            _formatName = formatName;
-            _compressionMode = compressionMode;
-            _chunkSize = chunkSize;
         }
 
         /**
@@ -92,35 +89,33 @@ namespace Lucene.Net.Codecs.Compressing
         public CompressingStoredFieldsFormat(string formatName, string segmentSuffix,
                                              CompressionMode compressionMode, int chunkSize)
         {
-            this._formatName = formatName;
-            this._segmentSuffix = segmentSuffix;
-            this._compressionMode = compressionMode;
+            this.formatName = formatName;
+            this.segmentSuffix = segmentSuffix;
+            this.compressionMode = compressionMode;
             if (chunkSize < 1)
             {
-                throw new System.ArgumentOutOfRangeException("chunkSize must be >= 1");
+                throw new ArgumentOutOfRangeException("chunkSize must be >= 1");
             }
-            this._chunkSize = chunkSize;
+            this.chunkSize = chunkSize;
 
         }
-
-        public override StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si,
-            FieldInfos fn, IOContext context)
+        
+        public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context)
         {
-            return new CompressingStoredFieldsReader(directory, si, _segmentSuffix, fn,
-                context, _formatName, _compressionMode);
+            return new CompressingStoredFieldsReader(directory, si, segmentSuffix, fn,
+                context, formatName, compressionMode);
         }
 
-        public override StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si,
-            IOContext context)
+        public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo si, IOContext context)
         {
-            return new CompressingStoredFieldsWriter(directory, si, _segmentSuffix, context,
-                _formatName, _compressionMode, _chunkSize);
+            return new CompressingStoredFieldsWriter(directory, si, segmentSuffix, context,
+                formatName, compressionMode, chunkSize);
         }
 
         public override string ToString()
         {
-            return this.GetType().Name + "(compressionMode=" + _compressionMode
-                + ", chunkSize=" + _chunkSize + ")";
+            return this.GetType().Name + "(compressionMode=" + compressionMode
+                + ", chunkSize=" + chunkSize + ")";
         }
 
     }
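
The three-argument constructor now chains to the four-argument one via : this(formatName, "", compressionMode, chunkSize), so the chunkSize check runs on every construction path instead of being skipped by the short form. A hypothetical caller (the format name and size are placeholders; CompressionMode.FAST is assumed to exist here as it does in upstream Lucene):

    // Chains internally to the four-argument constructor with segmentSuffix = "".
    var format = new CompressingStoredFieldsFormat("MyFormat", CompressionMode.FAST, 1 << 14);

    // Both forms now reject an invalid chunk size:
    // new CompressingStoredFieldsFormat("MyFormat", CompressionMode.FAST, 0)
    //     -> throws ArgumentOutOfRangeException ("chunkSize must be >= 1")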

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs b/src/core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
index d5a16df..8023c44 100644
--- a/src/core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
+++ b/src/core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
@@ -10,24 +10,24 @@ using System.Text;
 
 namespace Lucene.Net.Codecs.Compressing
 {
-    public sealed class CompressingStoredFieldsIndexReader: ICloneable //Closable??
+    public sealed class CompressingStoredFieldsIndexReader : ICloneable, IDisposable
     {
-        int maxDoc;
-        int[] docBases;
-        long[] startPointers;
-        int[] avgChunkDocs;
-        long[] avgChunkSizes;
-        PackedInts.Reader[] docBasesDeltas; // delta from the avg
-        PackedInts.Reader[] startPointersDeltas; // delta from the avg
+        internal readonly IndexInput fieldsIndexIn;
 
-        IndexInput fieldsIndexIn;
-
-        static long MoveLowOrderBitToSign(long n) 
+        internal static long MoveLowOrderBitToSign(long n)
         {
             return ((Number.URShift(n, 1) ^ -(n & 1)));
         }
 
-        public CompressingStoredFieldsIndexReader(IndexInput fieldsIndexIn, SegmentInfo si) 
+        internal readonly int maxDoc;
+        internal readonly int[] docBases;
+        internal readonly long[] startPointers;
+        internal readonly int[] avgChunkDocs;
+        internal readonly long[] avgChunkSizes;
+        internal readonly PackedInts.IReader[] docBasesDeltas; // delta from the avg
+        internal readonly PackedInts.IReader[] startPointersDeltas; // delta from the avg
+
+        public CompressingStoredFieldsIndexReader(IndexInput fieldsIndexIn, SegmentInfo si)
         {
             this.fieldsIndexIn = fieldsIndexIn;
             maxDoc = si.DocCount;
@@ -42,41 +42,46 @@ namespace Lucene.Net.Codecs.Compressing
 
             int blockCount = 0;
 
-            for (;;) {
-              int numChunks = fieldsIndexIn.ReadVInt();
-              if (numChunks == 0) {
-                break;
-              }
-
-              if (blockCount == docBases.Length) {
-                int newSize = ArrayUtil.Oversize(blockCount + 1, 8);
-                docBases = Arrays.CopyOf(docBases, newSize);
-                startPointers = Arrays.CopyOf(startPointers, newSize);
-                avgChunkDocs = Arrays.CopyOf(avgChunkDocs, newSize);
-                avgChunkSizes = Arrays.CopyOf(avgChunkSizes, newSize);
-                docBasesDeltas = Arrays.CopyOf(docBasesDeltas, newSize);
-                startPointersDeltas = Arrays.CopyOf(startPointersDeltas, newSize);
-              }
-
-              // doc bases
-              docBases[blockCount] = fieldsIndexIn.ReadVInt();
-              avgChunkDocs[blockCount] = fieldsIndexIn.ReadVInt();
-              int bitsPerDocBase = fieldsIndexIn.ReadVInt();
-              if (bitsPerDocBase > 32) {
-                throw new CorruptIndexException("Corrupted");
-              }
-              docBasesDeltas[blockCount] = (Lucene.Net.Util.Packed.PackedInts.Reader)PackedInts.GetReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerDocBase);
-
-              // start pointers
-              startPointers[blockCount] = fieldsIndexIn.ReadVLong();
-              avgChunkSizes[blockCount] = fieldsIndexIn.ReadVLong();
-              int bitsPerStartPointer = fieldsIndexIn.ReadVInt();
-              if (bitsPerStartPointer > 64) {
-                throw new CorruptIndexException("Corrupted");
-              }
-              startPointersDeltas[blockCount] = (Lucene.Net.Util.Packed.PackedInts.Reader)PackedInts.GetReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerStartPointer);
-
-              ++blockCount;
+            for (; ; )
+            {
+                int numChunks = fieldsIndexIn.ReadVInt();
+                if (numChunks == 0)
+                {
+                    break;
+                }
+
+                if (blockCount == docBases.Length)
+                {
+                    int newSize = ArrayUtil.Oversize(blockCount + 1, 8);
+                    docBases = Arrays.CopyOf(docBases, newSize);
+                    startPointers = Arrays.CopyOf(startPointers, newSize);
+                    avgChunkDocs = Arrays.CopyOf(avgChunkDocs, newSize);
+                    avgChunkSizes = Arrays.CopyOf(avgChunkSizes, newSize);
+                    docBasesDeltas = Arrays.CopyOf(docBasesDeltas, newSize);
+                    startPointersDeltas = Arrays.CopyOf(startPointersDeltas, newSize);
+                }
+
+                // doc bases
+                docBases[blockCount] = fieldsIndexIn.ReadVInt();
+                avgChunkDocs[blockCount] = fieldsIndexIn.ReadVInt();
+                int bitsPerDocBase = fieldsIndexIn.ReadVInt();
+                if (bitsPerDocBase > 32)
+                {
+                    throw new CorruptIndexException("Corrupted");
+                }
+                docBasesDeltas[blockCount] = (Lucene.Net.Util.Packed.PackedInts.Reader)PackedInts.GetReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerDocBase);
+
+                // start pointers
+                startPointers[blockCount] = fieldsIndexIn.ReadVLong();
+                avgChunkSizes[blockCount] = fieldsIndexIn.ReadVLong();
+                int bitsPerStartPointer = fieldsIndexIn.ReadVInt();
+                if (bitsPerStartPointer > 64)
+                {
+                    throw new CorruptIndexException("Corrupted");
+                }
+                startPointersDeltas[blockCount] = (Lucene.Net.Util.Packed.PackedInts.Reader)PackedInts.GetReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerStartPointer);
+
+                ++blockCount;
             }
 
             this.docBases = Arrays.CopyOf(docBases, blockCount);
@@ -99,77 +104,93 @@ namespace Lucene.Net.Codecs.Compressing
             this.startPointersDeltas = other.startPointersDeltas;
         }
 
-        private int Block(int docID) 
+        private int Block(int docID)
         {
             int lo = 0, hi = docBases.Length - 1;
-            while (lo <= hi) {
-              int mid = Number.URShift(lo + hi, 1);
-              int midValue = docBases[mid];
-              if (midValue == docID) {
-                return mid;
-              } else if (midValue < docID) {
-                lo = mid + 1;
-              } else {
-                hi = mid - 1;
-              }
+            while (lo <= hi)
+            {
+                int mid = Number.URShift(lo + hi, 1);
+                int midValue = docBases[mid];
+                if (midValue == docID)
+                {
+                    return mid;
+                }
+                else if (midValue < docID)
+                {
+                    lo = mid + 1;
+                }
+                else
+                {
+                    hi = mid - 1;
+                }
             }
             return hi;
         }
 
-        private int relativeDocBase(int block, int relativeChunk) 
+        private int RelativeDocBase(int block, int relativeChunk)
         {
             int expected = avgChunkDocs[block] * relativeChunk;
             long delta = MoveLowOrderBitToSign(docBasesDeltas[block].Get(relativeChunk));
-            return expected + (int) delta;
+            return expected + (int)delta;
         }
 
-          private long relativeStartPointer(int block, int relativeChunk) 
-          {
+        private long RelativeStartPointer(int block, int relativeChunk)
+        {
             long expected = avgChunkSizes[block] * relativeChunk;
             long delta = MoveLowOrderBitToSign(startPointersDeltas[block].Get(relativeChunk));
             return expected + delta;
-          }
+        }
 
-          private int relativeChunk(int block, int relativeDoc) 
-          {
+        private int RelativeChunk(int block, int relativeDoc)
+        {
             int lo = 0, hi = docBasesDeltas[block].Size() - 1;
-            while (lo <= hi) {
-              int mid = Number.URShift(lo + hi, 1);
-              int midValue = relativeDocBase(block, mid);
-              if (midValue == relativeDoc) {
-                return mid;
-              } else if (midValue < relativeDoc) {
-                lo = mid + 1;
-              } else {
-                hi = mid - 1;
-              }
+            while (lo <= hi)
+            {
+                int mid = Number.URShift(lo + hi, 1);
+                int midValue = RelativeDocBase(block, mid);
+                if (midValue == relativeDoc)
+                {
+                    return mid;
+                }
+                else if (midValue < relativeDoc)
+                {
+                    lo = mid + 1;
+                }
+                else
+                {
+                    hi = mid - 1;
+                }
             }
             return hi;
-          }
+        }
 
-          public long GetStartPointer(int docID) 
-          {
-            if (docID < 0 || docID >= maxDoc) {
-              throw new ArgumentException("docID out of range [0-" + maxDoc + "]: " + docID);
+        public long GetStartPointer(int docID)
+        {
+            if (docID < 0 || docID >= maxDoc)
+            {
+                throw new ArgumentException("docID out of range [0-" + maxDoc + "]: " + docID);
             }
             int block = Block(docID);
-            int relativeChunk = this.relativeChunk(block, docID - docBases[block]);
-            return startPointers[block] + relativeStartPointer(block, relativeChunk);
-          }
-
-          public override CompressingStoredFieldsIndexReader clone() 
-          {
-            if (fieldsIndexIn == null) {
-              return this;
-            } else {
-              return new CompressingStoredFieldsIndexReader(this);
+            int relativeChunk = this.RelativeChunk(block, docID - docBases[block]);
+            return startPointers[block] + RelativeStartPointer(block, relativeChunk);
+        }
+
+        public object Clone()
+        {
+            if (fieldsIndexIn == null)
+            {
+                return this;
+            }
+            else
+            {
+                return new CompressingStoredFieldsIndexReader(this);
             }
-          }
+        }
 
-          public override void close()
-          {
+        public void Dispose()
+        {
             IOUtils.Close(fieldsIndexIn);
-          }
+        }
 
     }
 }
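
Block(docID) above is a binary search that deliberately falls through to return hi: when docID is not itself a block's first document, the loop exits with hi pointing at the last block whose starting doc base is <= docID, i.e. the block containing the document. A short worked trace (the doc bases are made up for illustration):

    // docBases = { 0, 100, 250 }, docID = 180
    // lo=0, hi=2 -> mid=1, docBases[1]=100 < 180 -> lo=2
    // lo=2, hi=2 -> mid=2, docBases[2]=250 > 180 -> hi=1
    // lo > hi, return hi = 1: doc 180 lives in the block starting at doc base 100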

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs b/src/core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
index ece363a..3b348cf 100644
--- a/src/core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
+++ b/src/core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
@@ -9,40 +9,40 @@ namespace Lucene.Net.Codecs.Compressing
 {
     public sealed class CompressingStoredFieldsIndexWriter : IDisposable
     {
-        static readonly int BLOCK_SIZE = 1024; // number of chunks to serialize at once
-
-        private IndexOutput fieldsIndexOut;
-        private int totalDocs;
-        private int blockDocs;
-        private int blockChunks;
-        private long firstStartPointer;
-        private long maxStartPointer;
-        private int[] docBaseDeltas;
-        private long[] startPointerDeltas;
-
-        static long moveSignToLowOrderBit(long n)
+        internal const int BLOCK_SIZE = 1024; // number of chunks to serialize at once
+
+        internal static long MoveSignToLowOrderBit(long n)
         {
             return (n >> 63) ^ (n << 1);
         }
 
-        CompressingStoredFieldsIndexWriter(IndexOutput indexOutput)
+        internal readonly IndexOutput fieldsIndexOut;
+        internal int totalDocs;
+        internal int blockDocs;
+        internal int blockChunks;
+        internal long firstStartPointer;
+        internal long maxStartPointer;
+        internal readonly int[] docBaseDeltas;
+        internal readonly long[] startPointerDeltas;
+
+        internal CompressingStoredFieldsIndexWriter(IndexOutput indexOutput)
         {
             this.fieldsIndexOut = indexOutput;
-            reset();
+            Reset();
             totalDocs = 0;
             docBaseDeltas = new int[BLOCK_SIZE];
             startPointerDeltas = new long[BLOCK_SIZE];
             fieldsIndexOut.WriteVInt(PackedInts.VERSION_CURRENT);
         }
 
-        private void reset()
+        private void Reset()
         {
             blockChunks = 0;
             blockDocs = 0;
             firstStartPointer = -1; // means unset
         }
 
-        private void writeBlock()
+        private void WriteBlock()
         {
             fieldsIndexOut.WriteVInt(blockChunks);
 
@@ -72,7 +72,7 @@ namespace Lucene.Net.Codecs.Compressing
             for (int i = 0; i < blockChunks; ++i)
             {
                 int delta = docBase - avgChunkDocs * i;
-                maxDelta |= moveSignToLowOrderBit(delta);
+                maxDelta |= MoveSignToLowOrderBit(delta);
                 docBase += docBaseDeltas[i];
             }
 
@@ -84,7 +84,7 @@ namespace Lucene.Net.Codecs.Compressing
             for (int i = 0; i < blockChunks; ++i)
             {
                 long delta = docBase - avgChunkDocs * i;
-                writer.Add(moveSignToLowOrderBit(delta));
+                writer.Add(MoveSignToLowOrderBit(delta));
                 docBase += docBaseDeltas[i];
             }
             writer.Finish();
@@ -107,7 +107,7 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 startPointer += startPointerDeltas[i];
                 long delta = startPointer - avgChunkSize * i;
-                maxDelta |= moveSignToLowOrderBit(delta);
+                maxDelta |= MoveSignToLowOrderBit(delta);
             }
 
             int bitsPerStartPointer = PackedInts.BitsRequired(maxDelta);
@@ -119,17 +119,17 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 startPointer += startPointerDeltas[i];
                 long delta = startPointer - avgChunkSize * i;
-                writer.Add(moveSignToLowOrderBit(delta));
+                writer.Add(MoveSignToLowOrderBit(delta));
             }
             writer.Finish();
         }
 
-        void writeIndex(int numDocs, long startPointer)
+        internal void WriteIndex(int numDocs, long startPointer)
         {
             if (blockChunks == BLOCK_SIZE)
             {
-                writeBlock();
-                reset();
+                WriteBlock();
+                Reset();
             }
 
             if (firstStartPointer == -1)
@@ -146,7 +146,7 @@ namespace Lucene.Net.Codecs.Compressing
             maxStartPointer = startPointer;
         }
 
-        void finish(int numDocs)
+        internal void Finish(int numDocs)
         {
             if (numDocs != totalDocs)
             {
@@ -154,7 +154,7 @@ namespace Lucene.Net.Codecs.Compressing
             }
             if (blockChunks > 0)
             {
-                writeBlock();
+                WriteBlock();
             }
             fieldsIndexOut.WriteVInt(0); // end marker
         }
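
MoveSignToLowOrderBit here and MoveLowOrderBitToSign in the index reader are a zigzag encode/decode pair: folding the sign into bit 0 keeps small negative deltas numerically small, so PackedInts.BitsRequired(maxDelta) stays low. A self-contained round-trip check of the two expressions (plain C#, outside any Lucene type):

    using System;

    static class ZigZagDemo
    {
        // Writer's MoveSignToLowOrderBit.
        static long Encode(long n) { return (n >> 63) ^ (n << 1); }

        // Reader's MoveLowOrderBitToSign; (ulong)n >> 1 is what Number.URShift does.
        static long Decode(long n) { return (long)((ulong)n >> 1) ^ -(n & 1); }

        static void Main()
        {
            foreach (long v in new long[] { 0, 1, -1, 2, -2, 1234, -1234 })
            {
                Console.WriteLine("{0,6} -> {1,6} -> {2,6}", v, Encode(v), Decode(Encode(v)));
            }
            // 0->0, 1->2, -1->1, 2->4, -2->3, ...: |encoded| is about 2|v|,
            // so a delta near zero packs into very few bits.
        }
    }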

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
index a4d7f7d..170b3e3 100644
--- a/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+using Lucene.Net.Codecs.Lucene40;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -47,7 +48,7 @@ namespace Lucene.Net.Codecs.Compressing
         {
             this.fieldInfos = reader.fieldInfos;
             this.fieldsStream = (IndexInput)reader.fieldsStream.Clone();
-            this.indexReader = reader.indexReader.clone();
+            this.indexReader = (CompressingStoredFieldsIndexReader)reader.indexReader.Clone();
             this.packedIntsVersion = reader.packedIntsVersion;
             this.compressionMode = reader.compressionMode;
             this.decompressor = (Decompressor)reader.decompressor.Clone();
@@ -68,14 +69,14 @@ namespace Lucene.Net.Codecs.Compressing
             IndexInput indexStream = null;
             try
             {
-                fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);
-                string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION);
+                fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
+                string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
                 indexStream = d.OpenInput(indexStreamFN, context);
 
-                string codecNameIdx = formatName + CODEC_SFX_IDX;
-                string codecNameDat = formatName + CODEC_SFX_DAT;
-                CodecUtil.CheckHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT);
-                CodecUtil.CheckHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT);
+                string codecNameIdx = formatName + CompressingStoredFieldsWriter.CODEC_SFX_IDX;
+                string codecNameDat = formatName + CompressingStoredFieldsWriter.CODEC_SFX_DAT;
+                CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
+                CodecUtil.CheckHeader(fieldsStream, codecNameDat, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
 
                 indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
                 indexStream = null;
@@ -98,7 +99,7 @@ namespace Lucene.Net.Codecs.Compressing
         /**
          * @throws AlreadyClosedException if this FieldsReader is closed
          */
-        private void ensureOpen()
+        private void EnsureOpen()
         {
             if (closed)
             {
@@ -109,7 +110,7 @@ namespace Lucene.Net.Codecs.Compressing
         /** 
          * Close the underlying {@link IndexInput}s.
          */
-        public override void Close()
+        protected override void Dispose(bool disposing)
         {
             if (!closed)
             {
@@ -120,62 +121,62 @@ namespace Lucene.Net.Codecs.Compressing
 
         private static void ReadField(ByteArrayDataInput input, StoredFieldVisitor visitor, FieldInfo info, int bits)
         {
-            switch (bits & TYPE_MASK)
+            switch (bits & CompressingStoredFieldsWriter.TYPE_MASK)
             {
-                case BYTE_ARR:
-                    int length = input.readVInt();
+                case CompressingStoredFieldsWriter.BYTE_ARR:
+                    int length = input.ReadVInt();
                     byte[] data = new byte[length];
-                    input.readBytes(data, 0, length);
-                    visitor.binaryField(info, data);
+                    input.ReadBytes(data, 0, length);
+                    visitor.BinaryField(info, (sbyte[])(Array)data);
                     break;
-                case STRING:
-                    length = input.readVInt();
+                case CompressingStoredFieldsWriter.STRING:
+                    length = input.ReadVInt();
                     data = new byte[length];
-                    input.readBytes(data, 0, length);
-                    visitor.stringField(info, new string(data, IOUtils.CHARSET_UTF_8));
+                    input.ReadBytes(data, 0, length);
+                    visitor.StringField(info, IOUtils.CHARSET_UTF_8.GetString(data));
                     break;
-                case NUMERIC_INT:
-                    visitor.intField(info, input.readInt());
+                case CompressingStoredFieldsWriter.NUMERIC_INT:
+                    visitor.IntField(info, input.ReadInt());
                     break;
-                case NUMERIC_FLOAT:
-                    visitor.floatField(info, Float.intBitsToFloat(input.readInt()));
+                case CompressingStoredFieldsWriter.NUMERIC_FLOAT:
+                    visitor.FloatField(info, Number.IntBitsToFloat(input.ReadInt()));
                     break;
-                case NUMERIC_LONG:
-                    visitor.longField(info, input.readLong());
+                case CompressingStoredFieldsWriter.NUMERIC_LONG:
+                    visitor.LongField(info, input.ReadLong());
                     break;
-                case NUMERIC_DOUBLE:
-                    visitor.doubleField(info, Double.longBitsToDouble(input.readLong()));
+                case CompressingStoredFieldsWriter.NUMERIC_DOUBLE:
+                    visitor.DoubleField(info, BitConverter.Int64BitsToDouble(input.ReadLong()));
                     break;
                 default:
-                    throw new AssertionError("Unknown type flag: " + Integer.toHexString(bits));
+                    throw new InvalidOperationException("Unknown type flag: " + bits.ToString("X"));
             }
         }
 
         private static void SkipField(ByteArrayDataInput input, int bits)
         {
-            switch (bits & TYPE_MASK)
+            switch (bits & CompressingStoredFieldsWriter.TYPE_MASK)
             {
-                case BYTE_ARR:
-                case STRING:
-                    int length = input.readVInt();
-                    input.skipBytes(length);
+                case CompressingStoredFieldsWriter.BYTE_ARR:
+                case CompressingStoredFieldsWriter.STRING:
+                    int length = input.ReadVInt();
+                    input.SkipBytes(length);
                     break;
-                case NUMERIC_INT:
-                case NUMERIC_FLOAT:
-                    input.readInt();
+                case CompressingStoredFieldsWriter.NUMERIC_INT:
+                case CompressingStoredFieldsWriter.NUMERIC_FLOAT:
+                    input.ReadInt();
                     break;
-                case NUMERIC_LONG:
-                case NUMERIC_DOUBLE:
-                    input.readLong();
+                case CompressingStoredFieldsWriter.NUMERIC_LONG:
+                case CompressingStoredFieldsWriter.NUMERIC_DOUBLE:
+                    input.ReadLong();
                     break;
                 default:
-                    throw new AssertionError("Unknown type flag: " + Integer.toHexString(bits));
+                    throw new InvalidOperationException("Unknown type flag: " + bits.ToString("X"));
             }
         }
 
         public override void VisitDocument(int docID, StoredFieldVisitor visitor)
         {
-            fieldsStream.Seek(indexReader.getStartPointer(docID));
+            fieldsStream.Seek(indexReader.GetStartPointer(docID));
 
             int docBase = fieldsStream.ReadVInt();
             int chunkDocs = fieldsStream.ReadVInt();
@@ -234,14 +235,14 @@ namespace Lucene.Net.Codecs.Compressing
                     {
                         //TODO - HACKMP - Paul, this is a point of concern for me, in that everything from this file, and the 
                         //decompressor.Decompress() contract is looking for int.  But, I don't want to simply cast from long to int here.
-                        off += it.Next();
+                        off += (int)it.Next();
                     }
                     offset = off;
                     length = (int)it.Next();
                     off += length;
                     for (int i = docID - docBase + 1; i < chunkDocs; ++i)
                     {
-                        off += it.Next();
+                        off += (int)it.Next();
                     }
                     totalLength = off;
                 }
@@ -263,10 +264,10 @@ namespace Lucene.Net.Codecs.Compressing
             for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++)
             {
                 long infoAndBits = documentInput.ReadVLong();
-                int fieldNumber = Number.URShift(infoAndBits, TYPE_BITS); // (infoAndBits >>> TYPE_BITS);
+                int fieldNumber = (int)Number.URShift(infoAndBits, CompressingStoredFieldsWriter.TYPE_BITS); // (infoAndBits >>> TYPE_BITS);
                 FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
 
-                int bits = (int)(infoAndBits & TYPE_MASK);
+                int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK);
 
                 switch (visitor.NeedsField(fieldInfo))
                 {
@@ -282,9 +283,9 @@ namespace Lucene.Net.Codecs.Compressing
             }
         }
 
-        public override StoredFieldsReader Clone()
+        public override object Clone()
         {
-            ensureOpen();
+            EnsureOpen();
             return new CompressingStoredFieldsReader(this);
         }
 
@@ -299,32 +300,25 @@ namespace Lucene.Net.Codecs.Compressing
         // .NET Port: renamed to GetChunkIterator to avoid conflict with nested type.
         internal ChunkIterator GetChunkIterator(int startDocID)
         {
-            ensureOpen();
-            fieldsStream.Seek(indexReader.getStartPointer(startDocID));
-            return new ChunkIterator(fieldsStream, indexReader, numDocs, packedIntsVersion, decompressor);
+            EnsureOpen();
+            fieldsStream.Seek(indexReader.GetStartPointer(startDocID));
+            return new ChunkIterator(this);
         }
 
         internal sealed class ChunkIterator
         {
-            private IndexInput _fieldsStream;
-            private CompressingStoredFieldsReader _indexReader;
-            private Decompressor _decompressor;
-            private int _numOfDocs;
-            private int _packedIntsVersion;
-            BytesRef bytes;
-            int docBase;
-            int chunkDocs;
-            int[] numStoredFields;
-            int[] lengths;
-
-            public ChunkIterator(IndexInput fieldsStream, CompressingStoredFieldsReader indexReader,
-                                    int numOfDocs, int packedIntsVersion, Decompressor decompressor)
+            internal BytesRef bytes;
+            internal int docBase;
+            internal int chunkDocs;
+            internal int[] numStoredFields;
+            internal int[] lengths;
+
+            private readonly CompressingStoredFieldsReader parent;
+
+            public ChunkIterator(CompressingStoredFieldsReader parent)
             {
-                _indexReader = indexReader;
-                _numOfDocs = numOfDocs;
-                _packedIntsVersion = packedIntsVersion;
-                _decompressor = decompressor;
-                _fieldsStream = fieldsStream;
+                this.parent = parent; // .NET Port
+
                 this.docBase = -1;
                 bytes = new BytesRef();
                 numStoredFields = new int[1];
@@ -349,12 +343,12 @@ namespace Lucene.Net.Codecs.Compressing
              */
             public void Next(int doc)
             {
-                _fieldsStream.Seek(_indexReader.getStartPointer(doc));
+                parent.fieldsStream.Seek(parent.indexReader.GetStartPointer(doc));
 
-                int docBase = _fieldsStream.ReadVInt();
-                int chunkDocs = _fieldsStream.ReadVInt();
+                int docBase = parent.fieldsStream.ReadVInt();
+                int chunkDocs = parent.fieldsStream.ReadVInt();
                 if (docBase < this.docBase + this.chunkDocs
-                    || docBase + chunkDocs > _numOfDocs)
+                    || docBase + chunkDocs > parent.numDocs)
                 {
                     throw new CorruptIndexException("Corrupted: current docBase=" + this.docBase
                         + ", current numDocs=" + this.chunkDocs + ", new docBase=" + docBase
@@ -372,15 +366,15 @@ namespace Lucene.Net.Codecs.Compressing
 
                 if (chunkDocs == 1)
                 {
-                    numStoredFields[0] = _fieldsStream.ReadVInt();
-                    lengths[0] = _fieldsStream.ReadVInt();
+                    numStoredFields[0] = parent.fieldsStream.ReadVInt();
+                    lengths[0] = parent.fieldsStream.ReadVInt();
                 }
                 else
                 {
-                    int bitsPerStoredFields = _fieldsStream.ReadVInt();
+                    int bitsPerStoredFields = parent.fieldsStream.ReadVInt();
                     if (bitsPerStoredFields == 0)
                     {
-                        Arrays.Fill(numStoredFields, 0, chunkDocs, _fieldsStream.ReadVInt());
+                        Arrays.Fill(numStoredFields, 0, chunkDocs, parent.fieldsStream.ReadVInt());
                     }
                     else if (bitsPerStoredFields > 31)
                     {
@@ -388,17 +382,17 @@ namespace Lucene.Net.Codecs.Compressing
                     }
                     else
                     {
-                        PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(_fieldsStream, PackedInts.Format.PACKED, _packedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
+                        PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(parent.fieldsStream, PackedInts.Format.PACKED, parent.packedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
                         for (int i = 0; i < chunkDocs; ++i)
                         {
                             numStoredFields[i] = (int)it.Next();
                         }
                     }
 
-                    int bitsPerLength = _fieldsStream.ReadVInt();
+                    int bitsPerLength = parent.fieldsStream.ReadVInt();
                     if (bitsPerLength == 0)
                     {
-                        Arrays.Fill(lengths, 0, chunkDocs, _fieldsStream.ReadVInt());
+                        Arrays.Fill(lengths, 0, chunkDocs, parent.fieldsStream.ReadVInt());
                     }
                     else if (bitsPerLength > 31)
                     {
@@ -406,7 +400,7 @@ namespace Lucene.Net.Codecs.Compressing
                     }
                     else
                     {
-                        PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(_fieldsStream, PackedInts.Format.PACKED, _packedIntsVersion, chunkDocs, bitsPerLength, 1);
+                        PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(parent.fieldsStream, PackedInts.Format.PACKED, parent.packedIntsVersion, chunkDocs, bitsPerLength, 1);
                         for (int i = 0; i < chunkDocs; ++i)
                         {
                             lengths[i] = (int)it.Next();
@@ -422,7 +416,7 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 // decompress data
                 int chunkSize = this.ChunkSize();
-                _decompressor.Decompress(_fieldsStream, chunkSize, 0, chunkSize, bytes);
+                parent.decompressor.Decompress(parent.fieldsStream, chunkSize, 0, chunkSize, bytes);
                 if (bytes.length != chunkSize)
                 {
                     throw new CorruptIndexException("Corrupted: expected chunk size = " + this.ChunkSize() + ", got " + bytes.length);
@@ -434,10 +428,10 @@ namespace Lucene.Net.Codecs.Compressing
              */
             public void CopyCompressedData(DataOutput output)
             {
-                long chunkEnd = docBase + chunkDocs == _numOfDocs
-                    ? _fieldsStream.Length
-                    : _indexReader.getStartPointer(docBase + chunkDocs);
-                output.CopyBytes(_fieldsStream, chunkEnd - _fieldsStream.FilePointer);
+                long chunkEnd = docBase + chunkDocs == parent.numDocs
+                    ? parent.fieldsStream.Length
+                    : parent.indexReader.GetStartPointer(docBase + chunkDocs);
+                output.CopyBytes(parent.fieldsStream, chunkEnd - parent.fieldsStream.FilePointer);
             }
 
         }
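
The VisitDocument hunk above decodes each stored field header from a single vlong: the low TYPE_BITS bits carry the value-type flag (STRING, BYTE_ARR, NUMERIC_*) and the remaining high bits carry the field number, which is why the port needs explicit (int) casts around Number.URShift. A sketch of the packing and unpacking, with the constants mirrored from CompressingStoredFieldsWriter below (TYPE_BITS works out to 3 because NUMERIC_DOUBLE = 0x05 needs three bits):

    const int TYPE_BITS = 3;      // PackedInts.BitsRequired(NUMERIC_DOUBLE)
    const long TYPE_MASK = 0x07;  // PackedInts.MaxValue(TYPE_BITS)

    static long Pack(int fieldNumber, int typeFlag)
    {
        return ((long)fieldNumber << TYPE_BITS) | (long)typeFlag;
    }

    static void Unpack(long infoAndBits, out int fieldNumber, out int typeFlag)
    {
        fieldNumber = (int)((ulong)infoAndBits >> TYPE_BITS); // the Number.URShift call above
        typeFlag = (int)(infoAndBits & TYPE_MASK);            // the bits & TYPE_MASK switch key
    }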

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/Compressing/CompressingStoredFieldsWriter.cs b/src/core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
index 1543196..15a7484 100644
--- a/src/core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
+++ b/src/core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Codecs.Lucene40;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
@@ -13,80 +14,82 @@ namespace Lucene.Net.Codecs.Compressing
 {
     public sealed class CompressingStoredFieldsWriter : StoredFieldsWriter
     {
-        static readonly int MAX_DOCUMENTS_PER_CHUNK = 128;
-        static readonly int STRING = 0x00;
-        static readonly int BYTE_ARR = 0x01;
-        static readonly int NUMERIC_INT = 0x02;
-        static readonly int NUMERIC_FLOAT = 0x03;
-        static readonly int NUMERIC_LONG = 0x04;
-        static readonly int NUMERIC_DOUBLE = 0x05;
-
-        static readonly int TYPE_BITS = PackedInts.bitsRequired(NUMERIC_DOUBLE);
-        static readonly int TYPE_MASK = (int)PackedInts.maxValue(TYPE_BITS);
-
-        static readonly String CODEC_SFX_IDX = "Index";
-        static readonly String CODEC_SFX_DAT = "Data";
-        static readonly int VERSION_START = 0;
-        static readonly int VERSION_CURRENT = VERSION_START;
-
-        private Directory directory;
-        private string segment;
-        private string segmentSuffix;
+        internal const int MAX_DOCUMENTS_PER_CHUNK = 128;
+
+        internal const int STRING = 0x00;
+        internal const int BYTE_ARR = 0x01;
+        internal const int NUMERIC_INT = 0x02;
+        internal const int NUMERIC_FLOAT = 0x03;
+        internal const int NUMERIC_LONG = 0x04;
+        internal const int NUMERIC_DOUBLE = 0x05;
+
+        internal static readonly int TYPE_BITS = PackedInts.BitsRequired(NUMERIC_DOUBLE);
+        internal static readonly int TYPE_MASK = (int)PackedInts.MaxValue(TYPE_BITS);
+
+        internal const String CODEC_SFX_IDX = "Index";
+        internal const String CODEC_SFX_DAT = "Data";
+        internal const int VERSION_START = 0;
+        internal const int VERSION_CURRENT = VERSION_START;
+
+        private readonly Directory directory;
+        private readonly string segment;
+        private readonly string segmentSuffix;
         private CompressingStoredFieldsIndexWriter indexWriter;
         private IndexOutput fieldsStream;
 
-        private CompressionMode compressionMode;
-        private Compressor compressor;
-        private int chunkSize;
+        private readonly CompressionMode compressionMode;
+        private readonly Compressor compressor;
+        private readonly int chunkSize;
 
-        private GrowableByteArrayDataOutput bufferedDocs;
+        private readonly GrowableByteArrayDataOutput bufferedDocs;
         private int[] numStoredFields; // number of stored fields
         private int[] endOffsets; // end offsets in bufferedDocs
         private int docBase; // doc ID at the beginning of the chunk
         private int numBufferedDocs; // docBase + numBufferedDocs == current doc ID
 
-        public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) 
+        public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize)
         {
-          this.directory = directory;
-          this.segment = si.name;
-          this.segmentSuffix = segmentSuffix;
-          this.compressionMode = compressionMode;
-          this.compressor = compressionMode.newCompressor();
-          this.chunkSize = chunkSize;
-          this.docBase = 0;
-          this.bufferedDocs = new GrowableByteArrayDataOutput(chunkSize);
-          this.numStoredFields = new int[16];
-          this.endOffsets = new int[16];
-          this.numBufferedDocs = 0;
-
-          bool success = false;
-          IndexOutput indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION), context);
-          try 
-          {
-            fieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);
-
-            string codecNameIdx = formatName + CODEC_SFX_IDX;
-            string codecNameDat = formatName + CODEC_SFX_DAT;
-            CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT);
-            CodecUtil.WriteHeader(fieldsStream, codecNameDat, VERSION_CURRENT);
-
-            indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
-            indexStream = null;
-
-            fieldsStream.WriteVInt(PackedInts.VERSION_CURRENT);
-
-            success = true;
-          } 
-          finally 
-          {
-            if (!success) {
-              IOUtils.CloseWhileHandlingException(indexStream);
-              abort();
+            this.directory = directory;
+            this.segment = si.name;
+            this.segmentSuffix = segmentSuffix;
+            this.compressionMode = compressionMode;
+            this.compressor = compressionMode.newCompressor();
+            this.chunkSize = chunkSize;
+            this.docBase = 0;
+            this.bufferedDocs = new GrowableByteArrayDataOutput(chunkSize);
+            this.numStoredFields = new int[16];
+            this.endOffsets = new int[16];
+            this.numBufferedDocs = 0;
+
+            bool success = false;
+            IndexOutput indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION), context);
+            try
+            {
+                fieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
+
+                string codecNameIdx = formatName + CODEC_SFX_IDX;
+                string codecNameDat = formatName + CODEC_SFX_DAT;
+                CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT);
+                CodecUtil.WriteHeader(fieldsStream, codecNameDat, VERSION_CURRENT);
+
+                indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
+                indexStream = null;
+
+                fieldsStream.WriteVInt(PackedInts.VERSION_CURRENT);
+
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException((IDisposable)indexStream);
+                    Abort();
+                }
             }
-          }
         }
 
-        public override void Close()
+        protected override void Dispose(bool disposing)
         {
             try
             {
@@ -98,7 +101,7 @@ namespace Lucene.Net.Codecs.Compressing
                 indexWriter = null;
             }
         }
-
+        
         public override void StartDocument(int numStoredFields)
         {
             if (numBufferedDocs == this.numStoredFields.Length)
@@ -120,40 +123,45 @@ namespace Lucene.Net.Codecs.Compressing
             }
         }
 
-        private static void saveInts(int[] values, int length, DataOutput output) 
+        private static void SaveInts(int[] values, int length, DataOutput output)
         {
-          if (length == 1) 
-          {
-            output.WriteVInt(values[0]);
-          } 
-          else 
-          {
-            bool allEqual = true;
-            for (int i = 1; i < length; ++i) {
-              if (values[i] != values[0]) {
-                allEqual = false;
-                //break;
-              }
+            if (length == 1)
+            {
+                output.WriteVInt(values[0]);
             }
-            if (allEqual) {
-              output.WriteVInt(0);
-              output.WriteVInt(values[0]);
-            } 
-            else 
+            else
             {
-              long max = 0;
-              for (int i = 0; i < length; ++i) {
-                max |= values[i];
-              }
-              int bitsRequired = PackedInts.BitsRequired(max);
-              output.WriteVInt(bitsRequired);
-              PackedInts.Writer w = PackedInts.GetWriterNoHeader(output, PackedInts.Format.PACKED, length, bitsRequired, 1);
-              for (int i = 0; i < length; ++i) {
-                w.Add(values[i]);
-              }
-              w.Finish();
+                bool allEqual = true;
+                for (int i = 1; i < length; ++i)
+                {
+                    if (values[i] != values[0])
+                    {
+                        allEqual = false;
+                        //break;
+                    }
+                }
+                if (allEqual)
+                {
+                    output.WriteVInt(0);
+                    output.WriteVInt(values[0]);
+                }
+                else
+                {
+                    long max = 0;
+                    for (int i = 0; i < length; ++i)
+                    {
+                        max |= values[i];
+                    }
+                    int bitsRequired = PackedInts.BitsRequired(max);
+                    output.WriteVInt(bitsRequired);
+                    PackedInts.Writer w = PackedInts.GetWriterNoHeader(output, PackedInts.Format.PACKED, length, bitsRequired, 1);
+                    for (int i = 0; i < length; ++i)
+                    {
+                        w.Add(values[i]);
+                    }
+                    w.Finish();
+                }
             }
-          }
         }
 
         private void WriteHeader(int docBase, int numBufferedDocs, int[] numStoredFields, int[] lengths)
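The SaveInts hunk above settles the wire format for per-chunk metadata: one raw VInt when the chunk holds a single doc, a 0 marker plus one shared VInt when every value is equal, otherwise the bit width followed by a headerless packed block. The 0 marker cannot collide with a real bit width, because the packed branch only runs when at least one value differs, so max > 0 and bitsRequired >= 1. For reference, a hypothetical read-side counterpart (ReadInts does not exist in this commit; PackedInts.GetReaderIteratorNoHeader is assumed to mirror the GetWriterNoHeader call above, and packedIntsVersion is the VInt the writer emits right after the codec headers):

    // Hypothetical decoder for the SaveInts wire format; not part of this commit.
    private static int[] ReadInts(DataInput input, int length, int packedIntsVersion)
    {
        int[] values = new int[length];
        if (length == 1)
        {
            values[0] = input.ReadVInt(); // single doc: raw VInt
        }
        else
        {
            int token = input.ReadVInt();
            if (token == 0)
            {
                // all values were equal; one shared VInt follows
                int value = input.ReadVInt();
                for (int i = 0; i < length; ++i)
                {
                    values[i] = value;
                }
            }
            else
            {
                // token is bitsRequired; a headerless PACKED block follows
                PackedInts.ReaderIterator it = PackedInts.GetReaderIteratorNoHeader(
                    input, PackedInts.Format.PACKED, packedIntsVersion, length, token, 1);
                for (int i = 0; i < length; ++i)
                {
                    values[i] = (int)it.Next();
                }
            }
        }
        return values;
    }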
@@ -163,10 +171,10 @@ namespace Lucene.Net.Codecs.Compressing
             fieldsStream.WriteVInt(numBufferedDocs);
 
             // save numStoredFields
-            saveInts(numStoredFields, numBufferedDocs, fieldsStream);
+            SaveInts(numStoredFields, numBufferedDocs, fieldsStream);
 
             // save lengths
-            saveInts(lengths, numBufferedDocs, fieldsStream);
+            SaveInts(lengths, numBufferedDocs, fieldsStream);
         }
 
         private bool TriggerFlush()
@@ -197,37 +205,37 @@ namespace Lucene.Net.Codecs.Compressing
             bufferedDocs.Length = 0;
         }
 
-        public override void writeField(FieldInfo info, IndexableField field)
+        public override void WriteField(FieldInfo info, IIndexableField field)
         {
           int bits = 0;
           BytesRef bytes;
           string str;
 
-          Number number = field.numericValue();
+          object number = field.NumericValue;
           if (number != null) {
-            if (number instanceof Byte || number instanceof Short || number instanceof Integer) {
+            if (number is byte || number is sbyte || number is short || number is int) {
               bits = NUMERIC_INT;
-            } else if (number instanceof Long) {
+            } else if (number is long) {
               bits = NUMERIC_LONG;
-            } else if (number instanceof Float) {
+            } else if (number is float) {
               bits = NUMERIC_FLOAT;
-            } else if (number instanceof Double) {
+            } else if (number is double) {
               bits = NUMERIC_DOUBLE;
             } else {
-              throw new IllegalArgumentException("cannot store numeric type " + number.getClass());
+              throw new ArgumentException("cannot store numeric type " + number.GetType());
             }
             str = null;
             bytes = null;
           } else {
-            bytes = field.binaryValue();
+            bytes = field.BinaryValue;
             if (bytes != null) {
               bits = BYTE_ARR;
               str = null;
             } else {
               bits = STRING;
-              str = field.stringValue();
+              str = field.StringValue;
               if (str == null) {
-                throw new ArgumentException("field " + field.name() + " is stored but does not have binaryValue, stringValue nor numericValue");
+                throw new ArgumentException("field " + field.Name + " is stored but does not have binaryValue, stringValue nor numericValue");
               }
             }
           }
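The hunks above and below convert WriteField: a small type code (STRING, BYTE_ARR, or one of the NUMERIC_* constants) is chosen per field, then the value is written in the matching encoding; in the lines elided between these hunks, upstream Lucene packs the field number and this code into a single VLong, and the port presumably mirrors that. A hypothetical read-side dispatch mirroring the write order (ReadValue is not the committed reader code, which lives in CompressingStoredFieldsReader.cs; the BitConverter round-trip stands in for Java's Float.intBitsToFloat, which .NET 4.0 lacks as a direct helper):

    // Hypothetical sketch of how a reader would dispatch on the type code.
    internal static object ReadValue(DataInput input, int bits)
    {
        switch (bits)
        {
            case BYTE_ARR:
                int length = input.ReadVInt();           // length-prefixed raw bytes
                byte[] data = new byte[length];
                input.ReadBytes(data, 0, length);
                return new BytesRef(data);
            case STRING:
                return input.ReadString();               // VInt-length-prefixed UTF-8
            case NUMERIC_INT:
                return input.ReadInt();
            case NUMERIC_LONG:
                return input.ReadLong();
            case NUMERIC_FLOAT:
                // reassemble the float from the raw bits written via Number.FloatToIntBits
                return BitConverter.ToSingle(BitConverter.GetBytes(input.ReadInt()), 0);
            case NUMERIC_DOUBLE:
                return BitConverter.Int64BitsToDouble(input.ReadLong());
            default:
                throw new InvalidOperationException("unknown type flag: " + bits);
        }
    }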
@@ -239,126 +247,144 @@ namespace Lucene.Net.Codecs.Compressing
             bufferedDocs.WriteVInt(bytes.length);
             bufferedDocs.WriteBytes(bytes.bytes, bytes.offset, bytes.length);
           } else if (str != null) {
-            bufferedDocs.WriteString(field.stringValue());
+            bufferedDocs.WriteString(field.StringValue);
           } else {
-            if (number instanceof Byte || number instanceof Short || number instanceof Integer) {
-              bufferedDocs.writeInt(number.intValue());
-            } else if (number instanceof Long) {
-              bufferedDocs.writeLong(number.longValue());
-            } else if (number instanceof Float) {
-              bufferedDocs.writeInt(Float.floatToIntBits(number.floatValue()));
-            } else if (number instanceof Double) {
-              bufferedDocs.writeLong(Double.doubleToLongBits(number.doubleValue()));
+            if (number is byte || number is sbyte || number is short || number is int) {
+              bufferedDocs.WriteInt(Convert.ToInt32(number)); // unboxing straight to (int) throws for a boxed byte/sbyte/short
+            } else if (number is long) {
+              bufferedDocs.WriteLong((long)number);
+            } else if (number is float) {
+              bufferedDocs.WriteInt(Number.FloatToIntBits((float)number));
+            } else if (number is double) {
+              bufferedDocs.WriteLong(BitConverter.DoubleToInt64Bits((double)number));
             } else {
-              throw new AssertionError("Cannot get here");
+              throw new InvalidOperationException("Cannot get here");
             }
           }
         }
 
-        public override void Abort() {
-          IOUtils.CloseWhileHandlingException(this);
-          IOUtils.DeleteFilesIgnoringExceptions(directory,
-              IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_EXTENSION),
-              IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION));
-        }
-
-        public override void finish(FieldInfos fis, int numDocs) 
+        public override void Abort()
         {
-          if (numBufferedDocs > 0) {
-            Flush();
-          } else {
-            //assert bufferedDocs.length == 0;
-          }
-          if (docBase != numDocs) {
-            throw new RuntimeException("Wrote " + docBase + " docs, finish called with numDocs=" + numDocs);
-          }
-          indexWriter.finish(numDocs);
+            IOUtils.CloseWhileHandlingException((IDisposable)this);
+            IOUtils.DeleteFilesIgnoringExceptions(directory,
+                IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION),
+                IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION));
         }
 
-        public override int Merge(MergeState mergeState) 
+        public override void Finish(FieldInfos fis, int numDocs)
         {
-          int docCount = 0;
-          int idx = 0;
-
-          foreach (AtomicReader reader in mergeState.readers) 
-          {
-            SegmentReader matchingSegmentReader = mergeState.matchingSegmentReaders[idx++];
-            CompressingStoredFieldsReader matchingFieldsReader = null;
-            if (matchingSegmentReader != null) 
+            if (numBufferedDocs > 0)
             {
-              StoredFieldsReader fieldsReader = matchingSegmentReader.FieldsReader;
-              // we can only bulk-copy if the matching reader is also a CompressingStoredFieldsReader
-              if (fieldsReader != null && fieldsReader is CompressingStoredFieldsReader) 
-              {
-                matchingFieldsReader = (CompressingStoredFieldsReader) fieldsReader;
-              }
+                Flush();
             }
+            else
+            {
+                System.Diagnostics.Debug.Assert(bufferedDocs.Length == 0);
+            }
+            if (docBase != numDocs)
+            {
+                throw new InvalidOperationException("Wrote " + docBase + " docs, finish called with numDocs=" + numDocs);
+            }
+            indexWriter.Finish(numDocs);
+        }
 
-            int maxDoc = reader.MaxDoc;
-            IBits liveDocs = reader.LiveDocs;
+        public override int Merge(MergeState mergeState)
+        {
+            int docCount = 0;
+            int idx = 0;
 
-            if (matchingFieldsReader == null) {
-              // naive merge...
-              for (int i = NextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; i = NextLiveDoc(i + 1, liveDocs, maxDoc)) {
-                Document doc = reader.Document(i);
-                AddDocument(doc, mergeState.fieldInfos);
-                ++docCount;
-                mergeState.checkAbort.Work(300);
-              }
-            } else {
-              int docID = NextLiveDoc(0, liveDocs, maxDoc);
-              if (docID < maxDoc) {
-                // not all docs were deleted
-                ChunkIterator it = matchingFieldsReader.ChunkIterator(docID);
-                int[] startOffsets = new int[0];
-                do {
-                  // go to the next chunk that contains docID
-                  it.next(docID);
-                  // transform lengths into offsets
-                  if (startOffsets.Length < it.chunkDocs) {
-                    startOffsets = new int[ArrayUtil.Oversize(it.chunkDocs, 4)];
-                  }
-                  for (int i = 1; i < it.chunkDocs; ++i) {
-                    startOffsets[i] = startOffsets[i - 1] + it.lengths[i - 1];
-                  }
-
-                  if (compressionMode == matchingFieldsReader.getCompressionMode() // same compression mode
-                      && numBufferedDocs == 0 // starting a new chunk
-                      && startOffsets[it.chunkDocs - 1] < chunkSize // chunk is small enough
-                      && startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] >= chunkSize // chunk is large enough
-                      && nextDeletedDoc(it.docBase, liveDocs, it.docBase + it.chunkDocs) == it.docBase + it.chunkDocs) { // no deletion in the chunk
-
-                    // no need to decompress, just copy data
-                    indexWriter.writeIndex(it.chunkDocs, fieldsStream.FilePointer);
-                    WriteHeader(this.docBase, it.chunkDocs, it.numStoredFields, it.lengths);
-                    it.copyCompressedData(fieldsStream);
-                    this.docBase += it.chunkDocs;
-                    docID = NextLiveDoc(it.docBase + it.chunkDocs, liveDocs, maxDoc);
-                    docCount += it.chunkDocs;
-                    mergeState.checkAbort.Work(300 * it.chunkDocs);
-                  } else {
-                    // decompress
-                    it.decompress();
-                    if (startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] != it.bytes.length) {
-                      throw new CorruptIndexException("Corrupted: expected chunk size=" + startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] + ", got " + it.bytes.length);
+            foreach (AtomicReader reader in mergeState.readers)
+            {
+                SegmentReader matchingSegmentReader = mergeState.matchingSegmentReaders[idx++];
+                CompressingStoredFieldsReader matchingFieldsReader = null;
+                if (matchingSegmentReader != null)
+                {
+                    StoredFieldsReader fieldsReader = matchingSegmentReader.FieldsReader;
+                    // we can only bulk-copy if the matching reader is also a CompressingStoredFieldsReader
+                    if (fieldsReader != null && fieldsReader is CompressingStoredFieldsReader)
+                    {
+                        matchingFieldsReader = (CompressingStoredFieldsReader)fieldsReader;
                     }
-                    // copy non-deleted docs
-                    for (; docID < it.docBase + it.chunkDocs; docID = NextLiveDoc(docID + 1, liveDocs, maxDoc)) {
-                      int diff = docID - it.docBase;
-                      StartDocument(it.numStoredFields[diff]);
-                      bufferedDocs.WriteBytes(it.bytes.bytes, it.bytes.offset + startOffsets[diff], it.lengths[diff]);
-                      FinishDocument();
-                      ++docCount;
-                      mergeState.checkAbort.Work(300);
+                }
+
+                int maxDoc = reader.MaxDoc;
+                IBits liveDocs = reader.LiveDocs;
+
+                if (matchingFieldsReader == null)
+                {
+                    // naive merge...
+                    for (int i = NextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; i = NextLiveDoc(i + 1, liveDocs, maxDoc))
+                    {
+                        Document doc = reader.Document(i);
+                        AddDocument(doc, mergeState.fieldInfos);
+                        ++docCount;
+                        mergeState.checkAbort.Work(300);
                     }
-                  }
-                } while (docID < maxDoc);
-              }
+                }
+                else
+                {
+                    int docID = NextLiveDoc(0, liveDocs, maxDoc);
+                    if (docID < maxDoc)
+                    {
+                        // not all docs were deleted
+                        CompressingStoredFieldsReader.ChunkIterator it = matchingFieldsReader.GetChunkIterator(docID);
+                        int[] startOffsets = new int[0];
+                        do
+                        {
+                            // go to the next chunk that contains docID
+                            it.Next(docID);
+                            // transform lengths into offsets
+                            if (startOffsets.Length < it.chunkDocs)
+                            {
+                                startOffsets = new int[ArrayUtil.Oversize(it.chunkDocs, 4)];
+                            }
+                            for (int i = 1; i < it.chunkDocs; ++i)
+                            {
+                                startOffsets[i] = startOffsets[i - 1] + it.lengths[i - 1];
+                            }
+
+                            if (compressionMode == matchingFieldsReader.CompressionMode // same compression mode
+                                && numBufferedDocs == 0 // starting a new chunk
+                                && startOffsets[it.chunkDocs - 1] < chunkSize // chunk is small enough
+                                && startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] >= chunkSize // chunk is large enough
+                                && NextDeletedDoc(it.docBase, liveDocs, it.docBase + it.chunkDocs) == it.docBase + it.chunkDocs)
+                            { // no deletion in the chunk
+
+                                // no need to decompress, just copy data
+                                indexWriter.WriteIndex(it.chunkDocs, fieldsStream.FilePointer);
+                                WriteHeader(this.docBase, it.chunkDocs, it.numStoredFields, it.lengths);
+                                it.CopyCompressedData(fieldsStream);
+                                this.docBase += it.chunkDocs;
+                                docID = NextLiveDoc(it.docBase + it.chunkDocs, liveDocs, maxDoc);
+                                docCount += it.chunkDocs;
+                                mergeState.checkAbort.Work(300 * it.chunkDocs);
+                            }
+                            else
+                            {
+                                // decompress
+                                it.Decompress();
+                                if (startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] != it.bytes.length)
+                                {
+                                    throw new CorruptIndexException("Corrupted: expected chunk size=" + (startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1]) + ", got " + it.bytes.length);
+                                }
+                                // copy non-deleted docs
+                                for (; docID < it.docBase + it.chunkDocs; docID = NextLiveDoc(docID + 1, liveDocs, maxDoc))
+                                {
+                                    int diff = docID - it.docBase;
+                                    StartDocument(it.numStoredFields[diff]);
+                                    bufferedDocs.WriteBytes(it.bytes.bytes, it.bytes.offset + startOffsets[diff], it.lengths[diff]);
+                                    FinishDocument();
+                                    ++docCount;
+                                    mergeState.checkAbort.Work(300);
+                                }
+                            }
+                        } while (docID < maxDoc);
+                    }
+                }
             }
-          }
 
-          Finish(mergeState.fieldInfos, docCount);
-          return docCount;
+            Finish(mergeState.fieldInfos, docCount);
+            return docCount;
         }
 
         private static int NextLiveDoc(int doc, IBits liveDocs, int maxDoc)
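The rewritten Merge keeps the bulk-copy fast path: when every clause below holds, a whole compressed chunk is copied verbatim (WriteIndex + WriteHeader + CopyCompressedData) with no decompression. The two size clauses keep copied chunks close to what this writer would produce on its own, so repeated merges do not accumulate under- or over-sized chunks. A hypothetical extraction of the inlined test, for readability only (CanBulkCopy does not exist in the commit):

    // Hypothetical helper naming the fast-path conditions inlined in Merge above.
    private bool CanBulkCopy(CompressingStoredFieldsReader matchingFieldsReader,
        CompressingStoredFieldsReader.ChunkIterator it, IBits liveDocs, int[] startOffsets)
    {
        int last = it.chunkDocs - 1;
        return compressionMode == matchingFieldsReader.CompressionMode   // bytes decompress identically
            && numBufferedDocs == 0                                      // this writer sits on a chunk boundary
            && startOffsets[last] < chunkSize                            // source chunk is not oversized...
            && startOffsets[last] + it.lengths[last] >= chunkSize        // ...yet still fills a whole chunk
            && NextDeletedDoc(it.docBase, liveDocs, it.docBase + it.chunkDocs)
                   == it.docBase + it.chunkDocs;                         // and contains no deleted docs
    }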
@@ -374,7 +400,7 @@ namespace Lucene.Net.Codecs.Compressing
             return doc;
         }
 
-        private static int nextDeletedDoc(int doc, Bits liveDocs, int maxDoc)
+        private static int NextDeletedDoc(int doc, IBits liveDocs, int maxDoc)
         {
             if (liveDocs == null)
             {

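The final hunk only shows fragments of the two live-doc cursors. Per the upstream Java source they amount to the following sketch (not the committed text; the IBits indexer is assumed here, substitute Get(int) if that is what the port exposes):

    // Sketch of both cursors; bodies are truncated in the hunks above.
    private static int NextLiveDoc(int doc, IBits liveDocs, int maxDoc)
    {
        if (liveDocs == null)
        {
            return doc; // no deletions: every doc is live
        }
        while (doc < maxDoc && !liveDocs[doc])
        {
            ++doc; // skip deleted docs
        }
        return doc;
    }

    private static int NextDeletedDoc(int doc, IBits liveDocs, int maxDoc)
    {
        if (liveDocs == null)
        {
            return maxDoc; // no deletions: report "past the end"
        }
        while (doc < maxDoc && liveDocs[doc])
        {
            ++doc; // skip live docs
        }
        return doc;
    }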
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/80561f72/src/core/Codecs/Compressing/CompressingTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/core/Codecs/Compressing/CompressingTermVectorsFormat.cs b/src/core/Codecs/Compressing/CompressingTermVectorsFormat.cs
index 0a2afd1..49aac8a 100644
--- a/src/core/Codecs/Compressing/CompressingTermVectorsFormat.cs
+++ b/src/core/Codecs/Compressing/CompressingTermVectorsFormat.cs
@@ -1,18 +1,20 @@
-using System;
+using Lucene.Net.Index;
+using Lucene.Net.Store;
+using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Text;
 
 namespace Lucene.Net.Codecs.Compressing
 {
-    public class CompressingTermVectorsFormat: TermVectorsFormat
+    public class CompressingTermVectorsFormat : TermVectorsFormat
     {
-        private string formatName;
-        private string segmentSuffix;
-        private CompressionMode compressionMode;
-        private int chunkSize;
+        private readonly string formatName;
+        private readonly string segmentSuffix;
+        private readonly CompressionMode compressionMode;
+        private readonly int chunkSize;
 
-        public CompressingTermVectorsFormat(String formatName, String segmentSuffix, 
+        public CompressingTermVectorsFormat(string formatName, string segmentSuffix,
             CompressionMode compressionMode, int chunkSize)
         {
             this.formatName = formatName;
@@ -24,5 +26,23 @@ namespace Lucene.Net.Codecs.Compressing
             }
             this.chunkSize = chunkSize;
         }
+
+        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
+        {
+            return new CompressingTermVectorsReader(directory, segmentInfo, segmentSuffix,
+                fieldInfos, context, formatName, compressionMode);
+        }
+
+        public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
+        {
+            return new CompressingTermVectorsWriter(directory, segmentInfo, segmentSuffix,
+                context, formatName, compressionMode, chunkSize);
+        }
+
+        public override string ToString()
+        {
+            return GetType().Name + "(compressionMode=" + compressionMode
+                + ", chunkSize=" + chunkSize + ")";
+        }
     }
 }
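With VectorsReader and VectorsWriter overridden, the format is usable end to end. A hedged usage sketch (every name and value below is illustrative: CompressionMode.FAST is assumed to exist in the port as it does upstream, and directory, segmentInfo, fieldInfos, and context stand for whatever the calling codec already has in scope):

    // Illustrative wiring only; not code from this commit.
    CompressingTermVectorsFormat format = new CompressingTermVectorsFormat(
        "MyTermVectorsFormat", // formatName, written into the codec headers
        "",                    // segmentSuffix, empty for the default segment files
        CompressionMode.FAST,  // favor (de)compression speed over ratio
        1 << 14);              // chunkSize: buffer ~16 KB of vectors per compressed chunk

    TermVectorsWriter writer = format.VectorsWriter(directory, segmentInfo, context);
    TermVectorsReader reader = format.VectorsReader(directory, segmentInfo, fieldInfos, context);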