Posted to commits@lucenenet.apache.org by mh...@apache.org on 2013/09/24 20:32:39 UTC

[03/50] [abbrv] git commit: resolve merge conflict

resolve merge conflict


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e47e6637
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e47e6637
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e47e6637

Branch: refs/heads/branch_4x
Commit: e47e6637572c7b58b2a366caee7f3e7f1a308274
Parents: 64c13f3 d4b0864
Author: Mike Potts <mi...@feature23.com>
Authored: Wed Jul 10 23:24:34 2013 -0400
Committer: Mike Potts <mi...@feature23.com>
Committed: Wed Jul 10 23:24:34 2013 -0400

----------------------------------------------------------------------
 build/vs2012/test/Lucene.Net.Test.sln           |  58 +-
 src/contrib/Analyzers/Contrib.Analyzers.csproj  |  10 +
 src/contrib/Analyzers/Core/KeywordAnalyzer.cs   |  15 +
 src/contrib/Analyzers/Core/KeywordTokenizer.cs  |  87 +++
 .../Analyzers/Core/KeywordTokenizerFactory.cs   |  12 +
 .../Analyzers/Support/StringExtensions.cs       |  15 +
 .../Analyzers/Util/AbstractAnalysisFactory.cs   | 355 +++++++++
 src/contrib/Analyzers/Util/CharArrayMap.cs      | 480 ++++++++++++
 src/contrib/Analyzers/Util/CharArraySet.cs      |  15 +
 src/contrib/Analyzers/Util/CharacterUtils.cs    | 130 ++++
 src/contrib/Analyzers/Util/IResourceLoader.cs   |  17 +
 src/contrib/Analyzers/Util/TokenizerFactory.cs  |  11 +
 .../CompressingStoredFieldsFormat.cs            | 168 +++--
 .../CompressingStoredFieldsReader.cs            | 744 ++++++++++---------
 src/core/Codecs/Compressing/CompressionMode.cs  | 446 +++++------
 src/core/Codecs/Compressing/LZ4.cs              |   2 +-
 src/core/Index/IndexWriter.cs                   |   2 +-
 src/core/Support/Character.cs                   |  19 +
 src/core/Util/Version.cs                        |   2 +-
 test/core/Analysis/BaseTokenStreamTestCase.cs   | 257 -------
 test/core/Lucene.Net.Test.csproj                |  10 +-
 test/core/Util/LuceneTestCase.cs                | 297 --------
 .../Analysis/BaseTokenStreamTestCase.cs         | 532 +++++++++++++
 .../Lucene.Net.TestFramework.csproj             |  70 ++
 test/test-framework/Properties/AssemblyInfo.cs  |  36 +
 test/test-framework/Support/RandomizedTest.cs   |  41 +
 test/test-framework/Support/SystemProperties.cs |  26 +
 test/test-framework/Util/LuceneTestCase.cs      | 469 ++++++++++++
 28 files changed, 3100 insertions(+), 1226 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e47e6637/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
----------------------------------------------------------------------
diff --cc src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
index 9c55e07,748efab..a4d7f7d
--- a/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/core/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@@ -20,357 -20,426 +20,427 @@@ using Lucene.Net.Store
  using Lucene.Net.Support;
  using Lucene.Net.Util;
  using Lucene.Net.Util.Packed;
+ using System;
+ 
  namespace Lucene.Net.Codecs.Compressing
  {
- /**
-  * {@link StoredFieldsReader} impl for {@link CompressingStoredFieldsFormat}.
-  * @lucene.experimental
-  */
- public sealed class CompressingStoredFieldsReader: StoredFieldsReader {
- 
-   private FieldInfos fieldInfos;
-   private CompressingStoredFieldsIndexReader indexReader;
-   private IndexInput fieldsStream;
-   private int packedIntsVersion;
-   private CompressionMode compressionMode;
-   private Decompressor decompressor;
-   private BytesRef bytes;
-   private int numDocs;
-   private bool closed;
- 
-   // used by clone
-   private CompressingStoredFieldsReader(CompressingStoredFieldsReader reader) {
-     this.fieldInfos = reader.fieldInfos;
-     this.fieldsStream = (IndexInput)reader.fieldsStream.Clone();
-     this.indexReader = reader.indexReader.clone();
-     this.packedIntsVersion = reader.packedIntsVersion;
-     this.compressionMode = reader.compressionMode;
-     this.decompressor = (Decompressor)reader.decompressor.Clone();
-     this.numDocs = reader.numDocs;
-     this.bytes = new BytesRef(reader.bytes.bytes.Length);
-     this.closed = false;
-   }
- 
-   /** Sole constructor. */
-   public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segmentSuffix, FieldInfos fn,
-       IOContext context, string formatName, CompressionMode compressionMode) 
-   {
-     this.compressionMode = compressionMode;
-     string segment = si.name;
-     bool success = false;
-     fieldInfos = fn;
-     numDocs = si.DocCount;
-     IndexInput indexStream = null;
-     try {
-       fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);
-       string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION);
-       indexStream = d.OpenInput(indexStreamFN, context);
- 
-       string codecNameIdx = formatName + CODEC_SFX_IDX;
-       string codecNameDat = formatName + CODEC_SFX_DAT;
-       CodecUtil.CheckHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT);
-       CodecUtil.CheckHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT);
- 
-       indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
-       indexStream = null;
- 
-       packedIntsVersion = fieldsStream.ReadVInt();
-       decompressor = compressionMode.newDecompressor();
-       this.bytes = new BytesRef();
- 
-       success = true;
-     } finally {
-       if (!success) {
-         IOUtils.closeWhileHandlingException(this, indexStream);
-       }
-     }
-   }
- 
-   /**
-    * @throws AlreadyClosedException if this FieldsReader is closed
-    */
-   private void ensureOpen() {
-     if (closed) {
-       throw new AlreadyClosedException("this FieldsReader is closed");
-     }
-   }
- 
-   /** 
-    * Close the underlying {@link IndexInput}s.
-    */
-   public override void close() {
-     if (!closed) {
-       IOUtils.Close(fieldsStream, indexReader);
-       closed = true;
-     }
-   }
- 
-   private static void readField(ByteArrayDataInput input, StoredFieldVisitor visitor, FieldInfo info, int bits) {
-     switch (bits & TYPE_MASK) {
-       case BYTE_ARR:
-         int length = input.readVInt();
-         byte[] data = new byte[length];
-         input.readBytes(data, 0, length);
-         visitor.binaryField(info, data);
-         break;
-       case STRING:
-         length = input.readVInt();
-         data = new byte[length];
-         input.readBytes(data, 0, length);
-         visitor.stringField(info, new string(data, IOUtils.CHARSET_UTF_8));
-         break;
-       case NUMERIC_INT:
-         visitor.intField(info, input.readInt());
-         break;
-       case NUMERIC_FLOAT:
-         visitor.floatField(info, Float.intBitsToFloat(input.readInt()));
-         break;
-       case NUMERIC_LONG:
-         visitor.longField(info, input.readLong());
-         break;
-       case NUMERIC_DOUBLE:
-         visitor.doubleField(info, Double.longBitsToDouble(input.readLong()));
-         break;
-       default:
-         throw new AssertionError("Unknown type flag: " + Integer.toHexString(bits));
-     }
-   }
- 
-   private static void skipField(ByteArrayDataInput input, int bits) {
-     switch (bits & TYPE_MASK) {
-       case BYTE_ARR:
-       case STRING:
-         int length = input.readVInt();
-         input.skipBytes(length);
-         break;
-       case NUMERIC_INT:
-       case NUMERIC_FLOAT:
-         input.readInt();
-         break;
-       case NUMERIC_LONG:
-       case NUMERIC_DOUBLE:
-         input.readLong();
-         break;
-       default:
-         throw new AssertionError("Unknown type flag: " + Integer.toHexString(bits));
-     }
-   }
- 
-   public override void visitDocument(int docID, StoredFieldVisitor visitor)
-   {
-     fieldsStream.Seek(indexReader.getStartPointer(docID));
- 
-     int docBase = fieldsStream.ReadVInt();
-     int chunkDocs = fieldsStream.ReadVInt();
-     if (docID < docBase
-         || docID >= docBase + chunkDocs
-         || docBase + chunkDocs > numDocs) {
-       throw new CorruptIndexException("Corrupted: docID=" + docID
-           + ", docBase=" + docBase + ", chunkDocs=" + chunkDocs
-           + ", numDocs=" + numDocs);
-     }
 +
-     int numStoredFields, length, offset, totalLength;
-     if (chunkDocs == 1) {
-       numStoredFields = fieldsStream.ReadVInt();
-       offset = 0;
-       length = fieldsStream.ReadVInt();
-       totalLength = length;
-     } else {
-       int bitsPerStoredFields = fieldsStream.ReadVInt();
-       if (bitsPerStoredFields == 0) {
-         numStoredFields = fieldsStream.ReadVInt();
-       } else if (bitsPerStoredFields > 31) {
-         throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields);
-       } else {
-         long filePointer = fieldsStream.getFilePointer();
-         PackedInts.Reader reader = PackedInts.GetDirectReaderNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
-         numStoredFields = (int) (reader.Get(docID - docBase));
-         fieldsStream.Seek(filePointer + PackedInts.Format.PACKED.ByteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
-       }
- 
-       int bitsPerLength = fieldsStream.ReadVInt();
-       if (bitsPerLength == 0) {
-         length = fieldsStream.ReadVInt();
-         offset = (docID - docBase) * length;
-         totalLength = chunkDocs * length;
-       } else if (bitsPerStoredFields > 31) {
-         throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
-       } else {
-         PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
-         int off = 0;
-         for (int i = 0; i < docID - docBase; ++i) {
-           //TODO - HACKMP - Paul, this is a point of concern for me, in that everything from this file, and the 
-           //decompressor.Decompress() contract is looking for int.  But, I don't want to simply cast from long to int here.
-           off += it.Next();
+     /**
+      * {@link StoredFieldsReader} impl for {@link CompressingStoredFieldsFormat}.
+      * @lucene.experimental
+      */
+     public sealed class CompressingStoredFieldsReader : StoredFieldsReader
+     {
+ 
+         private FieldInfos fieldInfos;
+         private CompressingStoredFieldsIndexReader indexReader;
+         private IndexInput fieldsStream;
+         private int packedIntsVersion;
+         private CompressionMode compressionMode;
+         private Decompressor decompressor;
+         private BytesRef bytes;
+         private int numDocs;
+         private bool closed;
+ 
+         // used by clone
+         private CompressingStoredFieldsReader(CompressingStoredFieldsReader reader)
+         {
+             this.fieldInfos = reader.fieldInfos;
+             this.fieldsStream = (IndexInput)reader.fieldsStream.Clone();
+             this.indexReader = reader.indexReader.clone();
+             this.packedIntsVersion = reader.packedIntsVersion;
+             this.compressionMode = reader.compressionMode;
+             this.decompressor = (Decompressor)reader.decompressor.Clone();
+             this.numDocs = reader.numDocs;
+             this.bytes = new BytesRef(reader.bytes.bytes.Length);
+             this.closed = false;
          }
-         offset = off;
-         length = (int) it.Next();
-         off += length;
-         for (int i = docID - docBase + 1; i < chunkDocs; ++i) {
-           off += it.Next();
+ 
+         /** Sole constructor. */
+         public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segmentSuffix, FieldInfos fn,
+             IOContext context, string formatName, CompressionMode compressionMode)
+         {
+             this.compressionMode = compressionMode;
+             string segment = si.name;
+             bool success = false;
+             fieldInfos = fn;
+             numDocs = si.DocCount;
+             IndexInput indexStream = null;
+             try
+             {
+                 fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);
+                 string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION);
+                 indexStream = d.OpenInput(indexStreamFN, context);
+ 
+                 string codecNameIdx = formatName + CODEC_SFX_IDX;
+                 string codecNameDat = formatName + CODEC_SFX_DAT;
+                 CodecUtil.CheckHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT);
+                 CodecUtil.CheckHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT);
+ 
+                 indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
+                 indexStream = null;
+ 
+                 packedIntsVersion = fieldsStream.ReadVInt();
+                 decompressor = compressionMode.newDecompressor();
+                 this.bytes = new BytesRef();
+ 
+                 success = true;
+             }
+             finally
+             {
+                 if (!success)
+                 {
+                     IOUtils.CloseWhileHandlingException((IDisposable)this, indexStream);
+                 }
+             }
          }
-         totalLength = off;
-       }
-     }
  
-     if ((length == 0) != (numStoredFields == 0)) {
-       throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields);
-     }
-     if (numStoredFields == 0) {
-       // nothing to do
-       return;
-     }
+         /**
+          * @throws AlreadyClosedException if this FieldsReader is closed
+          */
+         private void ensureOpen()
+         {
+             if (closed)
+             {
+                 throw new AlreadyClosedException("this FieldsReader is closed");
+             }
+         }
  
-     decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes);
- 
-     ByteArrayDataInput documentInput = new ByteArrayDataInput(bytes.bytes, bytes.offset, bytes.length);
-     for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++) {
-       long infoAndBits = documentInput.ReadVLong();
-       int fieldNumber = Number.URShift(infoAndBits, TYPE_BITS); // (infoAndBits >>> TYPE_BITS);
-       FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
- 
-       int bits = (int) (infoAndBits & TYPE_MASK);
- 
-       switch(visitor.NeedsField(fieldInfo)) {
-         case YES:
-           readField(documentInput, visitor, fieldInfo, bits);
-           break;
-         case NO:
-           skipField(documentInput, bits);
-           break;
-         case STOP:
-           return;
-       }
-     }
-   }
- 
-   public override StoredFieldsReader clone() {
-     ensureOpen();
-     return new CompressingStoredFieldsReader(this);
-   }
- 
-   CompressionMode getCompressionMode() {
-     return compressionMode;
-   }
- 
-   ChunkIterator chunkIterator(int startDocID) {
-     ensureOpen();
-     fieldsStream.Seek(indexReader.getStartPointer(startDocID));
-     return new ChunkIterator();
-   }
- 
-   private readonly class ChunkIterator {
- 
-     private IndexInput _indexInput;
-     private CompressingStoredFieldsReader _indexReader;
-     private int _numOfDocs;
-     private int _packedIntsVersion;
-     BytesRef bytes;
-     int docBase;
-     int chunkDocs;
-     int[] numStoredFields;
-     int[] lengths;
- 
-     public ChunkIterator(IndexInput indexInput, CompressingStoredFieldsReader indexReader, 
-                             int numOfDocs, int packedIntsVersion) {
-         _indexInput = indexInput;
-         _indexReader = indexReader;
-         _numOfDocs = numOfDocs;
-         _packedIntsVersion = packedIntsVersion;
-       this.docBase = -1;
-       bytes = new BytesRef();
-       numStoredFields = new int[1];
-       lengths = new int[1];
-     }
+         /** 
+          * Close the underlying {@link IndexInput}s.
+          */
+         public override void Close()
+         {
+             if (!closed)
+             {
+                 IOUtils.Close(fieldsStream, indexReader);
+                 closed = true;
+             }
+         }
  
-     /**
-      * Return the decompressed size of the chunk
-      */
-     private int chunkSize() {
-       int sum = 0;
-       for (int i = 0; i < chunkDocs; ++i) {
-         sum += lengths[i];
-       }
-       return sum;
-     }
+         private static void ReadField(ByteArrayDataInput input, StoredFieldVisitor visitor, FieldInfo info, int bits)
+         {
+             switch (bits & TYPE_MASK)
+             {
+                 case BYTE_ARR:
+                     int length = input.ReadVInt();
+                     byte[] data = new byte[length];
+                     input.ReadBytes(data, 0, length);
+                     visitor.BinaryField(info, data);
+                     break;
+                 case STRING:
+                     length = input.ReadVInt();
+                     data = new byte[length];
+                     input.ReadBytes(data, 0, length);
+                     visitor.StringField(info, IOUtils.CHARSET_UTF_8.GetString(data, 0, length));
+                     break;
+                 case NUMERIC_INT:
+                     visitor.IntField(info, input.ReadInt());
+                     break;
+                 case NUMERIC_FLOAT:
+                     visitor.FloatField(info, BitConverter.ToSingle(BitConverter.GetBytes(input.ReadInt()), 0));
+                     break;
+                 case NUMERIC_LONG:
+                     visitor.LongField(info, input.ReadLong());
+                     break;
+                 case NUMERIC_DOUBLE:
+                     visitor.DoubleField(info, BitConverter.Int64BitsToDouble(input.ReadLong()));
+                     break;
+                 default:
+                     throw new InvalidOperationException("Unknown type flag: " + bits.ToString("x"));
+             }
+         }
  
-     /**
-      * Go to the chunk containing the provided doc ID.
-      */
-     void next(int doc) {
-       _indexInput.Seek(_indexReader.getStartPointer(doc));
- 
-       int docBase = _indexInput.ReadVInt();
-       int chunkDocs = _indexInput.ReadVInt();
-       if (docBase < this.docBase + this.chunkDocs
-           || docBase + chunkDocs > _numOfDocs) {
-         throw new CorruptIndexException("Corrupted: current docBase=" + this.docBase
-             + ", current numDocs=" + this.chunkDocs + ", new docBase=" + docBase
-             + ", new numDocs=" + chunkDocs);
-       }
-       this.docBase = docBase;
-       this.chunkDocs = chunkDocs;
- 
-       if (chunkDocs > numStoredFields.Length) {
-         int newLength = ArrayUtil.Oversize(chunkDocs, 4);
-         numStoredFields = new int[newLength];
-         lengths = new int[newLength];
-       }
- 
-       if (chunkDocs == 1) {
-           numStoredFields[0] = _indexInput.ReadVInt();
-           lengths[0] = _indexInput.ReadVInt();
-       } else {
-           int bitsPerStoredFields = _indexInput.ReadVInt();
-         if (bitsPerStoredFields == 0) {
-             Arrays.Fill(numStoredFields, 0, chunkDocs, _indexInput.ReadVInt());
-         } else if (bitsPerStoredFields > 31) {
-           throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields);
-         } else {
-             PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(_indexInput, PackedInts.Format.PACKED, _packedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
-           for (int i = 0; i < chunkDocs; ++i) {
-             numStoredFields[i] = (int) it.Next();
-           }
+         private static void SkipField(ByteArrayDataInput input, int bits)
+         {
+             switch (bits & TYPE_MASK)
+             {
+                 case BYTE_ARR:
+                 case STRING:
+                     int length = input.ReadVInt();
+                     input.SkipBytes(length);
+                     break;
+                 case NUMERIC_INT:
+                 case NUMERIC_FLOAT:
+                     input.ReadInt();
+                     break;
+                 case NUMERIC_LONG:
+                 case NUMERIC_DOUBLE:
+                     input.ReadLong();
+                     break;
+                 default:
+                     throw new InvalidOperationException("Unknown type flag: " + bits.ToString("x"));
+             }
          }
  
-         int bitsPerLength = _indexInput.ReadVInt();
-         if (bitsPerLength == 0) {
-             Arrays.Fill(lengths, 0, chunkDocs, _indexInput.ReadVInt());
-         } else if (bitsPerLength > 31) {
-           throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
-         } else {
-             PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(_indexInput, PackedInts.Format.PACKED, _packedIntsVersion, chunkDocs, bitsPerLength, 1);
-           for (int i = 0; i < chunkDocs; ++i) {
-             lengths[i] = (int) it.Next();
-           }
+         public override void VisitDocument(int docID, StoredFieldVisitor visitor)
+         {
+             fieldsStream.Seek(indexReader.getStartPointer(docID));
+ 
+             int docBase = fieldsStream.ReadVInt();
+             int chunkDocs = fieldsStream.ReadVInt();
+             if (docID < docBase
+                 || docID >= docBase + chunkDocs
+                 || docBase + chunkDocs > numDocs)
+             {
+                 throw new CorruptIndexException("Corrupted: docID=" + docID
+                     + ", docBase=" + docBase + ", chunkDocs=" + chunkDocs
+                     + ", numDocs=" + numDocs);
+             }
+ 
+             int numStoredFields, length, offset, totalLength;
+             if (chunkDocs == 1)
+             {
+                 numStoredFields = fieldsStream.ReadVInt();
+                 offset = 0;
+                 length = fieldsStream.ReadVInt();
+                 totalLength = length;
+             }
+             else
+             {
+                 int bitsPerStoredFields = fieldsStream.ReadVInt();
+                 if (bitsPerStoredFields == 0)
+                 {
+                     numStoredFields = fieldsStream.ReadVInt();
+                 }
+                 else if (bitsPerStoredFields > 31)
+                 {
+                     throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields);
+                 }
+                 else
+                 {
+                     long filePointer = fieldsStream.FilePointer;
+                     PackedInts.Reader reader = PackedInts.GetDirectReaderNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
+                     numStoredFields = (int)(reader.Get(docID - docBase));
+                     fieldsStream.Seek(filePointer + PackedInts.Format.PACKED.ByteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
+                 }
+ 
+                 int bitsPerLength = fieldsStream.ReadVInt();
+                 if (bitsPerLength == 0)
+                 {
+                     length = fieldsStream.ReadVInt();
+                     offset = (docID - docBase) * length;
+                     totalLength = chunkDocs * length;
+                 }
+                 else if (bitsPerLength > 31)
+                 {
+                     throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
+                 }
+                 else
+                 {
+                     PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
+                     int off = 0;
+                     for (int i = 0; i < docID - docBase; ++i)
+                     {
+                         //TODO - HACKMP - Paul, this is a point of concern for me: everything in this file and the
+                         //decompressor.Decompress() contract expects int, so this narrows from long to int (see the checked-narrowing note after the diff).
+                         off += (int)it.Next();
+                     }
+                     offset = off;
+                     length = (int)it.Next();
+                     off += length;
+                     for (int i = docID - docBase + 1; i < chunkDocs; ++i)
+                     {
+                         off += (int)it.Next();
+                     }
+                     totalLength = off;
+                 }
+             }
+ 
+             if ((length == 0) != (numStoredFields == 0))
+             {
+                 throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields);
+             }
+             if (numStoredFields == 0)
+             {
+                 // nothing to do
+                 return;
+             }
+ 
+             decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes);
+ 
+             ByteArrayDataInput documentInput = new ByteArrayDataInput((byte[])(Array)bytes.bytes, bytes.offset, bytes.length);
+             for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++)
+             {
+                 long infoAndBits = documentInput.ReadVLong();
+                 int fieldNumber = (int)Number.URShift(infoAndBits, TYPE_BITS); // (infoAndBits >>> TYPE_BITS); see the URShift note after the diff
+                 FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
+ 
+                 int bits = (int)(infoAndBits & TYPE_MASK);
+ 
+                 switch (visitor.NeedsField(fieldInfo))
+                 {
+                     case StoredFieldVisitor.Status.YES:
+                         ReadField(documentInput, visitor, fieldInfo, bits);
+                         break;
+                     case StoredFieldVisitor.Status.NO:
+                         SkipField(documentInput, bits);
+                         break;
+                     case StoredFieldVisitor.Status.STOP:
+                         return;
+                 }
+             }
          }
-       }
-     }
  
-     /**
-      * Decompress the chunk.
-      */
-     void decompress(){
-       // decompress data
-       int chunkSize = this.chunkSize();
-       decompressor.Decompress(_indexInput, chunkSize, 0, chunkSize, bytes);
-       if (bytes.length != chunkSize) {
-         throw new CorruptIndexException("Corrupted: expected chunk size = " + this.chunkSize() + ", got " + bytes.length);
-       }
-     }
+         public override StoredFieldsReader Clone()
+         {
+             ensureOpen();
+             return new CompressingStoredFieldsReader(this);
+         }
  
-     /**
-      * Copy compressed data.
-      */
-     void copyCompressedData(DataOutput output){
-       long chunkEnd = docBase + chunkDocs == numDocs
-           ? fieldsStream.length()
-           : indexReader.getStartPointer(docBase + chunkDocs);
-       output.copyBytes(fieldsStream, chunkEnd - fieldsStream.getFilePointer());
-     }
+         public CompressionMode CompressionMode
+         {
+             get
+             {
+                 return compressionMode;
+             }
+         }
  
-   }
+         // .NET Port: renamed to GetChunkIterator to avoid conflict with nested type.
+         internal ChunkIterator GetChunkIterator(int startDocID)
+         {
+             ensureOpen();
+             fieldsStream.Seek(indexReader.getStartPointer(startDocID));
+             return new ChunkIterator(fieldsStream, indexReader, numDocs, packedIntsVersion, decompressor);
+         }
  
- }
+         internal sealed class ChunkIterator
+         {
+             private IndexInput _fieldsStream;
+             private CompressingStoredFieldsReader _indexReader;
+             private Decompressor _decompressor;
+             private int _numOfDocs;
+             private int _packedIntsVersion;
+             BytesRef bytes;
+             int docBase;
+             int chunkDocs;
+             int[] numStoredFields;
+             int[] lengths;
+ 
+             public ChunkIterator(IndexInput fieldsStream, CompressingStoredFieldsReader indexReader,
+                                     int numOfDocs, int packedIntsVersion, Decompressor decompressor)
+             {
+                 _indexReader = indexReader;
+                 _numOfDocs = numOfDocs;
+                 _packedIntsVersion = packedIntsVersion;
+                 _decompressor = decompressor;
+                 _fieldsStream = fieldsStream;
+                 this.docBase = -1;
+                 bytes = new BytesRef();
+                 numStoredFields = new int[1];
+                 lengths = new int[1];
+             }
+ 
+             /**
+              * Return the decompressed size of the chunk
+              */
+             public int ChunkSize()
+             {
+                 int sum = 0;
+                 for (int i = 0; i < chunkDocs; ++i)
+                 {
+                     sum += lengths[i];
+                 }
+                 return sum;
+             }
+ 
+             /**
+              * Go to the chunk containing the provided doc ID.
+              */
+             public void Next(int doc)
+             {
+                 _fieldsStream.Seek(_indexReader.getStartPointer(doc));
+ 
+                 int docBase = _fieldsStream.ReadVInt();
+                 int chunkDocs = _fieldsStream.ReadVInt();
+                 if (docBase < this.docBase + this.chunkDocs
+                     || docBase + chunkDocs > _numOfDocs)
+                 {
+                     throw new CorruptIndexException("Corrupted: current docBase=" + this.docBase
+                         + ", current numDocs=" + this.chunkDocs + ", new docBase=" + docBase
+                         + ", new numDocs=" + chunkDocs);
+                 }
+                 this.docBase = docBase;
+                 this.chunkDocs = chunkDocs;
+ 
+                 if (chunkDocs > numStoredFields.Length)
+                 {
+                     int newLength = ArrayUtil.Oversize(chunkDocs, 4);
+                     numStoredFields = new int[newLength];
+                     lengths = new int[newLength];
+                 }
+ 
+                 if (chunkDocs == 1)
+                 {
+                     numStoredFields[0] = _fieldsStream.ReadVInt();
+                     lengths[0] = _fieldsStream.ReadVInt();
+                 }
+                 else
+                 {
+                     int bitsPerStoredFields = _fieldsStream.ReadVInt();
+                     if (bitsPerStoredFields == 0)
+                     {
+                         Arrays.Fill(numStoredFields, 0, chunkDocs, _fieldsStream.ReadVInt());
+                     }
+                     else if (bitsPerStoredFields > 31)
+                     {
+                         throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields);
+                     }
+                     else
+                     {
+                         PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(_fieldsStream, PackedInts.Format.PACKED, _packedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
+                         for (int i = 0; i < chunkDocs; ++i)
+                         {
+                             numStoredFields[i] = (int)it.Next();
+                         }
+                     }
+ 
+                     int bitsPerLength = _fieldsStream.ReadVInt();
+                     if (bitsPerLength == 0)
+                     {
+                         Arrays.Fill(lengths, 0, chunkDocs, _fieldsStream.ReadVInt());
+                     }
+                     else if (bitsPerLength > 31)
+                     {
+                         throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
+                     }
+                     else
+                     {
+                         PackedInts.ReaderIterator it = (PackedInts.ReaderIterator)PackedInts.GetReaderIteratorNoHeader(_fieldsStream, PackedInts.Format.PACKED, _packedIntsVersion, chunkDocs, bitsPerLength, 1);
+                         for (int i = 0; i < chunkDocs; ++i)
+                         {
+                             lengths[i] = (int)it.Next();
+                         }
+                     }
+                 }
+             }
+ 
+             /**
+              * Decompress the chunk.
+              */
+             public void Decompress()
+             {
+                 // decompress data
+                 int chunkSize = this.ChunkSize();
+                 _decompressor.Decompress(_fieldsStream, chunkSize, 0, chunkSize, bytes);
+                 if (bytes.length != chunkSize)
+                 {
+                     throw new CorruptIndexException("Corrupted: expected chunk size = " + this.ChunkSize() + ", got " + bytes.length);
+                 }
+             }
+ 
+             /**
+              * Copy compressed data.
+              */
+             public void CopyCompressedData(DataOutput output)
+             {
+                 long chunkEnd = docBase + chunkDocs == _numOfDocs
+                     ? _fieldsStream.Length
+                     : _indexReader.getStartPointer(docBase + chunkDocs);
+                 output.CopyBytes(_fieldsStream, chunkEnd - _fieldsStream.FilePointer);
+             }
+ 
+         }
+ 
+     }
  }
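
----------------------------------------------------------------------

A note on Number.URShift, used above to port Java's unsigned right
shift (infoAndBits >>> TYPE_BITS): C# has no >>> operator, so the
shift has to run on the unsigned representation. Below is a minimal
standalone sketch of that operation; the URShift name mirrors the
Lucene.Net.Support helper, but the body is an illustrative
assumption, not the ported source.

    using System;

    // Sketch of the zero-fill right shift Number.URShift stands in for:
    // reinterpret the bits as unsigned, shift, and reinterpret back, so
    // the vacated high bits are zero-filled rather than sign-extended.
    static class UnsignedShiftSketch
    {
        public static long URShift(long value, int bits)
        {
            return (long)((ulong)value >> bits);
        }

        static void Main()
        {
            Console.WriteLine(URShift(-1L, 60)); // 15: zero-filled, not sign-extended
        }
    }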
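
The HACKMP TODO in VisitDocument flags the narrowing of the long
values returned by PackedInts.ReaderIterator.Next() to int. One hedge
against silent truncation is a checked conversion, sketched below
with a hypothetical helper that is not part of this commit.

    using System;

    // Hypothetical helper: narrow long to int, failing loudly on
    // overflow instead of silently truncating, which is the concern
    // the TODO raises.
    static class NarrowSketch
    {
        public static int ToIntExact(long value)
        {
            return checked((int)value); // throws OverflowException if out of range
        }

        static void Main()
        {
            Console.WriteLine(ToIntExact(123456789L));
            // ToIntExact(long.MaxValue); // would throw OverflowException
        }
    }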
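
For reference, the offset/length bookkeeping in VisitDocument reduces
to prefix sums over the per-document decompressed lengths of a chunk:
a document's offset is the sum of the lengths before it, and
totalLength is the sum across the whole chunk. A minimal sketch over
an in-memory array, assuming docInChunk = docID - docBase (the helper
below is hypothetical, not from the commit):

    using System;

    // Hypothetical illustration of the arithmetic in VisitDocument:
    // lengths[i] is the decompressed length of the i-th document in
    // the chunk; offset is a prefix sum, totalLength the full sum.
    static class ChunkMathSketch
    {
        public static void Locate(int[] lengths, int docInChunk,
                                  out int offset, out int length, out int totalLength)
        {
            offset = 0;
            for (int i = 0; i < docInChunk; i++) offset += lengths[i]; // docs before ours
            length = lengths[docInChunk];
            totalLength = offset + length;
            for (int i = docInChunk + 1; i < lengths.Length; i++) totalLength += lengths[i];
        }

        static void Main()
        {
            int[] lengths = { 10, 20, 30 };
            Locate(lengths, 1, out int off, out int len, out int total);
            Console.WriteLine("offset=" + off + " length=" + len + " totalLength=" + total); // 10 20 60
        }
    }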