Posted to commits@lucenenet.apache.org by cc...@apache.org on 2013/04/03 19:39:56 UTC

[13/51] [partial] Mass convert mixed tabs to spaces

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/FieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/FieldsReader.cs b/src/core/Index/FieldsReader.cs
index 8fa351d..d4973d9 100644
--- a/src/core/Index/FieldsReader.cs
+++ b/src/core/Index/FieldsReader.cs
@@ -28,150 +28,150 @@ using IndexInput = Lucene.Net.Store.IndexInput;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary> Class responsible for access to stored document fields.
-	/// <p/>
-	/// It uses &lt;segment&gt;.fdt and &lt;segment&gt;.fdx files.
-	/// 
-	/// </summary>
-	public sealed class FieldsReader : ICloneable, IDisposable
-	{
-		private readonly FieldInfos fieldInfos;
-		
-		// The main fieldStream, used only for cloning.
-		private readonly IndexInput cloneableFieldsStream;
-		
-		// This is a clone of cloneableFieldsStream used for reading documents.
-		// It should not be cloned outside of a synchronized context.
-		private readonly IndexInput fieldsStream;
-		
-		private readonly IndexInput cloneableIndexStream;
-		private readonly IndexInput indexStream;
-		private readonly int numTotalDocs;
-		private readonly int size;
-		private bool closed;
-		private readonly int format;
-		private readonly int formatSize;
-		
-		// The docID offset where our docs begin in the index
-		// file.  This will be 0 if we have our own private file.
-		private readonly int docStoreOffset;
-		
-		private readonly CloseableThreadLocal<IndexInput> fieldsStreamTL = new CloseableThreadLocal<IndexInput>();
-		private readonly bool isOriginal = false;
-		
-		/// <summary>Returns a cloned FieldsReader that shares open
-		/// IndexInputs with the original one.  It is the caller's
-		/// job not to close the original FieldsReader until all
-	/// clones are closed (eg, currently SegmentReader manages
-		/// this logic). 
-		/// </summary>
-		public System.Object Clone()
-		{
-			EnsureOpen();
-			return new FieldsReader(fieldInfos, numTotalDocs, size, format, formatSize, docStoreOffset, cloneableFieldsStream, cloneableIndexStream);
-		}
-		
-		// Used only by clone
-		private FieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, int format, int formatSize, int docStoreOffset, IndexInput cloneableFieldsStream, IndexInput cloneableIndexStream)
-		{
-			this.fieldInfos = fieldInfos;
-			this.numTotalDocs = numTotalDocs;
-			this.size = size;
-			this.format = format;
-			this.formatSize = formatSize;
-			this.docStoreOffset = docStoreOffset;
-			this.cloneableFieldsStream = cloneableFieldsStream;
-			this.cloneableIndexStream = cloneableIndexStream;
-			fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
-			indexStream = (IndexInput) cloneableIndexStream.Clone();
-		}
-		
-		public /*internal*/ FieldsReader(Directory d, String segment, FieldInfos fn):this(d, segment, fn, BufferedIndexInput.BUFFER_SIZE, - 1, 0)
-		{
-		}
-		
-		internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize):this(d, segment, fn, readBufferSize, - 1, 0)
-		{
-		}
-		
-		internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
-		{
-			bool success = false;
-			isOriginal = true;
-			try
-			{
-				fieldInfos = fn;
-				
-				cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
-				cloneableIndexStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);
-				
-				// First version of fdx did not include a format
-				// header, but, the first int will always be 0 in that
-				// case
-				int firstInt = cloneableIndexStream.ReadInt();
-				format = firstInt == 0 ? 0 : firstInt;
-				
-				if (format > FieldsWriter.FORMAT_CURRENT)
-					throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
-				
-				formatSize = format > FieldsWriter.FORMAT ? 4 : 0;
-				
-				if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
-					cloneableFieldsStream.SetModifiedUTF8StringsMode();
-				
-				fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
-				
-				long indexSize = cloneableIndexStream.Length() - formatSize;
-				
-				if (docStoreOffset != - 1)
-				{
-					// We read only a slice out of this shared fields file
-					this.docStoreOffset = docStoreOffset;
-					this.size = size;
-					
-					// Verify the file is long enough to hold all of our
-					// docs
-					System.Diagnostics.Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
-				}
-				else
-				{
-					this.docStoreOffset = 0;
-					this.size = (int) (indexSize >> 3);
-				}
-				
-				indexStream = (IndexInput) cloneableIndexStream.Clone();
-				numTotalDocs = (int) (indexSize >> 3);
-				success = true;
-			}
-			finally
-			{
-				// With lock-less commits, it's entirely possible (and
-				// fine) to hit a FileNotFound exception above. In
-				// this case, we want to explicitly close any subset
-				// of things that were opened so that we don't have to
-				// wait for a GC to do so.
-				if (!success)
-				{
-					Dispose();
-				}
-			}
-		}
-		
-		/// <throws>  AlreadyClosedException if this FieldsReader is closed </throws>
-		internal void  EnsureOpen()
-		{
-			if (closed)
-			{
-				throw new AlreadyClosedException("this FieldsReader is closed");
-			}
-		}
-		
-		/// <summary> Closes the underlying <see cref="Lucene.Net.Store.IndexInput" /> streams, including any ones associated with a
-		/// lazy implementation of a Field.  This means that the Fields values will not be accessible.
-		/// 
-		/// </summary>
-		/// <throws>  IOException </throws>
+    
+    /// <summary> Class responsible for access to stored document fields.
+    /// <p/>
+    /// It uses &lt;segment&gt;.fdt and &lt;segment&gt;.fdx files.
+    /// 
+    /// </summary>
+    public sealed class FieldsReader : ICloneable, IDisposable
+    {
+        private readonly FieldInfos fieldInfos;
+        
+        // The main fieldStream, used only for cloning.
+        private readonly IndexInput cloneableFieldsStream;
+        
+        // This is a clone of cloneableFieldsStream used for reading documents.
+        // It should not be cloned outside of a synchronized context.
+        private readonly IndexInput fieldsStream;
+        
+        private readonly IndexInput cloneableIndexStream;
+        private readonly IndexInput indexStream;
+        private readonly int numTotalDocs;
+        private readonly int size;
+        private bool closed;
+        private readonly int format;
+        private readonly int formatSize;
+        
+        // The docID offset where our docs begin in the index
+        // file.  This will be 0 if we have our own private file.
+        private readonly int docStoreOffset;
+        
+        private readonly CloseableThreadLocal<IndexInput> fieldsStreamTL = new CloseableThreadLocal<IndexInput>();
+        private readonly bool isOriginal = false;
+        
+        /// <summary>Returns a cloned FieldsReader that shares open
+        /// IndexInputs with the original one.  It is the caller's
+        /// job not to close the original FieldsReader until all
+        /// clones are closed (eg, currently SegmentReader manages
+        /// this logic). 
+        /// </summary>
+        public System.Object Clone()
+        {
+            EnsureOpen();
+            return new FieldsReader(fieldInfos, numTotalDocs, size, format, formatSize, docStoreOffset, cloneableFieldsStream, cloneableIndexStream);
+        }
+        
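
The clone contract above deserves a concrete illustration. A minimal usage sketch, assuming a Directory and FieldInfos are already set up (the driver code here is hypothetical, not part of this commit): clones share the original's open IndexInputs, so the original must stay open until every clone is finished.

    // Hypothetical driver code; dir and fieldInfos are assumed, "_0" is a made-up segment name.
    FieldsReader original = new FieldsReader(dir, "_0", fieldInfos);
    FieldsReader perThread = (FieldsReader) original.Clone(); // shares open IndexInputs

    // ... perThread can now read documents independently ...

    // Only after all clones are done may the original be disposed;
    // disposing it closes the file handles the clones depend on.
    original.Dispose();
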
+        // Used only by clone
+        private FieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, int format, int formatSize, int docStoreOffset, IndexInput cloneableFieldsStream, IndexInput cloneableIndexStream)
+        {
+            this.fieldInfos = fieldInfos;
+            this.numTotalDocs = numTotalDocs;
+            this.size = size;
+            this.format = format;
+            this.formatSize = formatSize;
+            this.docStoreOffset = docStoreOffset;
+            this.cloneableFieldsStream = cloneableFieldsStream;
+            this.cloneableIndexStream = cloneableIndexStream;
+            fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
+            indexStream = (IndexInput) cloneableIndexStream.Clone();
+        }
+        
+        public /*internal*/ FieldsReader(Directory d, String segment, FieldInfos fn):this(d, segment, fn, BufferedIndexInput.BUFFER_SIZE, - 1, 0)
+        {
+        }
+        
+        internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize):this(d, segment, fn, readBufferSize, - 1, 0)
+        {
+        }
+        
+        internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
+        {
+            bool success = false;
+            isOriginal = true;
+            try
+            {
+                fieldInfos = fn;
+                
+                cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
+                cloneableIndexStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);
+                
+                // First version of fdx did not include a format
+                // header, but, the first int will always be 0 in that
+                // case
+                int firstInt = cloneableIndexStream.ReadInt();
+                format = firstInt == 0 ? 0 : firstInt;
+                
+                if (format > FieldsWriter.FORMAT_CURRENT)
+                    throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
+                
+                formatSize = format > FieldsWriter.FORMAT ? 4 : 0;
+                
+                if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
+                    cloneableFieldsStream.SetModifiedUTF8StringsMode();
+                
+                fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
+                
+                long indexSize = cloneableIndexStream.Length() - formatSize;
+                
+                if (docStoreOffset != - 1)
+                {
+                    // We read only a slice out of this shared fields file
+                    this.docStoreOffset = docStoreOffset;
+                    this.size = size;
+                    
+                    // Verify the file is long enough to hold all of our
+                    // docs
+                    System.Diagnostics.Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
+                }
+                else
+                {
+                    this.docStoreOffset = 0;
+                    this.size = (int) (indexSize >> 3);
+                }
+                
+                indexStream = (IndexInput) cloneableIndexStream.Clone();
+                numTotalDocs = (int) (indexSize >> 3);
+                success = true;
+            }
+            finally
+            {
+                // With lock-less commits, it's entirely possible (and
+                // fine) to hit a FileNotFound exception above. In
+                // this case, we want to explicitly close any subset
+                // of things that were opened so that we don't have to
+                // wait for a GC to do so.
+                if (!success)
+                {
+                    Dispose();
+                }
+            }
+        }
+        
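
The size arithmetic in this constructor follows directly from the .fdx layout: an optional 4-byte format header, then one 8-byte pointer into .fdt per document. Shifting the remaining byte count right by 3 divides by 8, giving the document count. A worked example with assumed numbers:

    // Values are hypothetical, for illustration only.
    long fileLength = 804;                     // total .fdx length in bytes
    int formatSize = 4;                        // header present (format > FORMAT)
    long indexSize = fileLength - formatSize;  // 800 bytes of doc pointers
    int numDocs = (int)(indexSize >> 3);       // 800 / 8 = 100 documents
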
+        /// <throws>  AlreadyClosedException if this FieldsReader is closed </throws>
+        internal void  EnsureOpen()
+        {
+            if (closed)
+            {
+                throw new AlreadyClosedException("this FieldsReader is closed");
+            }
+        }
+        
+        /// <summary> Closes the underlying <see cref="Lucene.Net.Store.IndexInput" /> streams, including any ones associated with a
+        /// lazy implementation of a Field.  This means that the Fields values will not be accessible.
+        /// 
+        /// </summary>
+        /// <throws>  IOException </throws>
         public void Dispose()
         {
             // Move to protected method if class becomes unsealed
@@ -200,166 +200,166 @@ namespace Lucene.Net.Index
                 closed = true;
             }
         }
-		
-		public /*internal*/ int Size()
-		{
-			return size;
-		}
-		
-		private void  SeekIndex(int docID)
-		{
-			indexStream.Seek(formatSize + (docID + docStoreOffset) * 8L);
-		}
-		
-		internal bool CanReadRawDocs()
+        
+        public /*internal*/ int Size()
+        {
+            return size;
+        }
+        
+        private void  SeekIndex(int docID)
+        {
+            indexStream.Seek(formatSize + (docID + docStoreOffset) * 8L);
+        }
+        
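
SeekIndex reads the same layout back: entry i starts at formatSize + i * 8, and docStoreOffset re-bases docIDs for a reader that owns only a slice of a shared doc store. A small helper restating the formula (a sketch, not part of the class):

    static long IndexPositionOf(int docID, int docStoreOffset, int formatSize)
    {
        // One 8-byte pointer per document, after the optional header.
        return formatSize + (docID + docStoreOffset) * 8L;
    }
    // e.g. IndexPositionOf(0, 25, 4) == 204 and IndexPositionOf(10, 25, 4) == 284
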
+        internal bool CanReadRawDocs()
         {
             // Disable reading raw docs in 2.x format, because of the removal of compressed
             // fields in 3.0. We don't want rawDocs() to decode field bits to figure out
             // if a field was compressed, hence we enforce ordinary (non-raw) stored field merges
             // for <3.0 indexes.
-			return format >= FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS;
-		}
-		
-		public /*internal*/ Document Doc(int n, FieldSelector fieldSelector)
-		{
-			SeekIndex(n);
-			long position = indexStream.ReadLong();
-			fieldsStream.Seek(position);
-			
-			var doc = new Document();
-			int numFields = fieldsStream.ReadVInt();
-			for (int i = 0; i < numFields; i++)
-			{
-				int fieldNumber = fieldsStream.ReadVInt();
-				FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
-				FieldSelectorResult acceptField = fieldSelector == null?FieldSelectorResult.LOAD:fieldSelector.Accept(fi.name);
-				
-				byte bits = fieldsStream.ReadByte();
-				System.Diagnostics.Debug.Assert(bits <= FieldsWriter.FIELD_IS_COMPRESSED + FieldsWriter.FIELD_IS_TOKENIZED + FieldsWriter.FIELD_IS_BINARY);
-				
-				bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
-			    System.Diagnostics.Debug.Assert(
-			        (!compressed || (format < FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS)),
-			        "compressed fields are only allowed in indexes of version <= 2.9");
-				bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
-				bool binary = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;
-				//TODO: Find an alternative approach here if this list continues to grow beyond the
-				//list of 5 or 6 currently here.  See Lucene 762 for discussion
-				if (acceptField.Equals(FieldSelectorResult.LOAD))
-				{
-					AddField(doc, fi, binary, compressed, tokenize);
-				}
-				else if (acceptField.Equals(FieldSelectorResult.LOAD_AND_BREAK))
-				{
-					AddField(doc, fi, binary, compressed, tokenize);
-					break; //Get out of this loop
-				}
-				else if (acceptField.Equals(FieldSelectorResult.LAZY_LOAD))
-				{
-					AddFieldLazy(doc, fi, binary, compressed, tokenize);
-				}
-				else if (acceptField.Equals(FieldSelectorResult.SIZE))
-				{
-					SkipField(binary, compressed, AddFieldSize(doc, fi, binary, compressed));
-				}
-				else if (acceptField.Equals(FieldSelectorResult.SIZE_AND_BREAK))
-				{
-					AddFieldSize(doc, fi, binary, compressed);
-					break;
-				}
-				else
-				{
-					SkipField(binary, compressed);
-				}
-			}
-			
-			return doc;
-		}
-		
-		/// <summary>Returns the length in bytes of each raw document in a
-		/// contiguous range of length numDocs starting with
-		/// startDocID.  Returns the IndexInput (the fieldStream),
-		/// already seeked to the starting point for startDocID.
-		/// </summary>
-		internal IndexInput RawDocs(int[] lengths, int startDocID, int numDocs)
-		{
-			SeekIndex(startDocID);
-			long startOffset = indexStream.ReadLong();
-			long lastOffset = startOffset;
-			int count = 0;
-			while (count < numDocs)
-			{
-				long offset;
-				int docID = docStoreOffset + startDocID + count + 1;
-				System.Diagnostics.Debug.Assert(docID <= numTotalDocs);
-				if (docID < numTotalDocs)
-					offset = indexStream.ReadLong();
-				else
-					offset = fieldsStream.Length();
-				lengths[count++] = (int) (offset - lastOffset);
-				lastOffset = offset;
-			}
-			
-			fieldsStream.Seek(startOffset);
-			
-			return fieldsStream;
-		}
-		
-		/// <summary> Skip the field.  We still have to read some of the information about the field, but can skip past the actual content.
-		/// This will have the most payoff on large fields.
-		/// </summary>
-		private void  SkipField(bool binary, bool compressed)
-		{
-			SkipField(binary, compressed, fieldsStream.ReadVInt());
-		}
-		
-		private void  SkipField(bool binary, bool compressed, int toRead)
-		{
-			if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES || binary || compressed)
-			{
-				fieldsStream.Seek(fieldsStream.FilePointer + toRead);
-			}
-			else
-			{
-				// We need to skip chars.  This will slow us down, but still better
-				fieldsStream.SkipChars(toRead);
-			}
-		}
-		
-		private void  AddFieldLazy(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
-		{
-			if (binary)
-			{
-				int toRead = fieldsStream.ReadVInt();
-				long pointer = fieldsStream.FilePointer;
-				//was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
-				doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));
+            return format >= FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS;
+        }
+        
+        public /*internal*/ Document Doc(int n, FieldSelector fieldSelector)
+        {
+            SeekIndex(n);
+            long position = indexStream.ReadLong();
+            fieldsStream.Seek(position);
+            
+            var doc = new Document();
+            int numFields = fieldsStream.ReadVInt();
+            for (int i = 0; i < numFields; i++)
+            {
+                int fieldNumber = fieldsStream.ReadVInt();
+                FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
+                FieldSelectorResult acceptField = fieldSelector == null?FieldSelectorResult.LOAD:fieldSelector.Accept(fi.name);
+                
+                byte bits = fieldsStream.ReadByte();
+                System.Diagnostics.Debug.Assert(bits <= FieldsWriter.FIELD_IS_COMPRESSED + FieldsWriter.FIELD_IS_TOKENIZED + FieldsWriter.FIELD_IS_BINARY);
+                
+                bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
+                System.Diagnostics.Debug.Assert(
+                    (!compressed || (format < FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS)),
+                    "compressed fields are only allowed in indexes of version <= 2.9");
+                bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
+                bool binary = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;
+                //TODO: Find an alternative approach here if this list continues to grow beyond the
+                //list of 5 or 6 currently here.  See Lucene 762 for discussion
+                if (acceptField.Equals(FieldSelectorResult.LOAD))
+                {
+                    AddField(doc, fi, binary, compressed, tokenize);
+                }
+                else if (acceptField.Equals(FieldSelectorResult.LOAD_AND_BREAK))
+                {
+                    AddField(doc, fi, binary, compressed, tokenize);
+                    break; //Get out of this loop
+                }
+                else if (acceptField.Equals(FieldSelectorResult.LAZY_LOAD))
+                {
+                    AddFieldLazy(doc, fi, binary, compressed, tokenize);
+                }
+                else if (acceptField.Equals(FieldSelectorResult.SIZE))
+                {
+                    SkipField(binary, compressed, AddFieldSize(doc, fi, binary, compressed));
+                }
+                else if (acceptField.Equals(FieldSelectorResult.SIZE_AND_BREAK))
+                {
+                    AddFieldSize(doc, fi, binary, compressed);
+                    break;
+                }
+                else
+                {
+                    SkipField(binary, compressed);
+                }
+            }
+            
+            return doc;
+        }
+        
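
Each stored field starts with a one-byte flag set, which is why the assert above bounds bits by the sum of the three masks. Decoding in isolation, with a made-up example value:

    // Masks mirror FieldsWriter; the bits value is hypothetical.
    const byte FIELD_IS_TOKENIZED = 0x1;
    const byte FIELD_IS_BINARY = 0x2;
    const byte FIELD_IS_COMPRESSED = 0x4; // legal only in pre-3.0 indexes

    byte bits = 0x3; // example: tokenized + binary
    bool tokenize = (bits & FIELD_IS_TOKENIZED) != 0;    // true
    bool binary = (bits & FIELD_IS_BINARY) != 0;         // true
    bool compressed = (bits & FIELD_IS_COMPRESSED) != 0; // false
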
+        /// <summary>Returns the length in bytes of each raw document in a
+        /// contiguous range of length numDocs starting with
+        /// startDocID.  Returns the IndexInput (the fieldStream),
+        /// already seeked to the starting point for startDocID.
+        /// </summary>
+        internal IndexInput RawDocs(int[] lengths, int startDocID, int numDocs)
+        {
+            SeekIndex(startDocID);
+            long startOffset = indexStream.ReadLong();
+            long lastOffset = startOffset;
+            int count = 0;
+            while (count < numDocs)
+            {
+                long offset;
+                int docID = docStoreOffset + startDocID + count + 1;
+                System.Diagnostics.Debug.Assert(docID <= numTotalDocs);
+                if (docID < numTotalDocs)
+                    offset = indexStream.ReadLong();
+                else
+                    offset = fieldsStream.Length();
+                lengths[count++] = (int) (offset - lastOffset);
+                lastOffset = offset;
+            }
+            
+            fieldsStream.Seek(startOffset);
+            
+            return fieldsStream;
+        }
+        
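
RawDocs converts consecutive .fdx pointers into per-document byte lengths: each length is the next document's start offset minus the current one's, with the total .fdt length standing in for the offset past the last document. With assumed offsets:

    // Hypothetical .fdx pointers: doc 5 -> 1000, doc 6 -> 1300, doc 7 -> 1450
    // RawDocs(lengths, 5, 2) yields:
    //   lengths[0] = 1300 - 1000 = 300  // bytes of doc 5 in .fdt
    //   lengths[1] = 1450 - 1300 = 150  // bytes of doc 6
    // and returns fieldsStream already seeked to offset 1000.
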
+        /// <summary> Skip the field.  We still have to read some of the information about the field, but can skip past the actual content.
+        /// This will have the most payoff on large fields.
+        /// </summary>
+        private void  SkipField(bool binary, bool compressed)
+        {
+            SkipField(binary, compressed, fieldsStream.ReadVInt());
+        }
+        
+        private void  SkipField(bool binary, bool compressed, int toRead)
+        {
+            if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES || binary || compressed)
+            {
+                fieldsStream.Seek(fieldsStream.FilePointer + toRead);
+            }
+            else
+            {
+                // We need to skip chars.  This will slow us down, but still better
+                fieldsStream.SkipChars(toRead);
+            }
+        }
+        
+        private void  AddFieldLazy(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
+        {
+            if (binary)
+            {
+                int toRead = fieldsStream.ReadVInt();
+                long pointer = fieldsStream.FilePointer;
+                //was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
+                doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));
 
-				//Need to move the pointer ahead by toRead positions
-				fieldsStream.Seek(pointer + toRead);
-			}
-			else
-			{
-				const Field.Store store = Field.Store.YES;
-				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
-				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
-				
-				AbstractField f;
-				if (compressed)
-				{
-					int toRead = fieldsStream.ReadVInt();
-					long pointer = fieldsStream.FilePointer;
-					f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
-					//skip over the part that we aren't loading
-					fieldsStream.Seek(pointer + toRead);
-					f.OmitNorms = fi.omitNorms;
-					f.OmitTermFreqAndPositions = fi.omitTermFreqAndPositions;
-				}
-				else
-				{
-					int length = fieldsStream.ReadVInt();
-					long pointer = fieldsStream.FilePointer;
-					//Skip ahead of where we are by the length of what is stored
+                //Need to move the pointer ahead by toRead positions
+                fieldsStream.Seek(pointer + toRead);
+            }
+            else
+            {
+                const Field.Store store = Field.Store.YES;
+                Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
+                Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
+                
+                AbstractField f;
+                if (compressed)
+                {
+                    int toRead = fieldsStream.ReadVInt();
+                    long pointer = fieldsStream.FilePointer;
+                    f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
+                    //skip over the part that we aren't loading
+                    fieldsStream.Seek(pointer + toRead);
+                    f.OmitNorms = fi.omitNorms;
+                    f.OmitTermFreqAndPositions = fi.omitTermFreqAndPositions;
+                }
+                else
+                {
+                    int length = fieldsStream.ReadVInt();
+                    long pointer = fieldsStream.FilePointer;
+                    //Skip ahead of where we are by the length of what is stored
                     if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
                     {
                         fieldsStream.Seek(pointer + length);
@@ -368,274 +368,274 @@ namespace Lucene.Net.Index
                     {
                         fieldsStream.SkipChars(length);
                     }
-					f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed)
-					    	{OmitNorms = fi.omitNorms, OmitTermFreqAndPositions = fi.omitTermFreqAndPositions};
-				}
+                    f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed)
+                            {OmitNorms = fi.omitNorms, OmitTermFreqAndPositions = fi.omitTermFreqAndPositions};
+                }
 
-				doc.Add(f);
-			}
-		}
+                doc.Add(f);
+            }
+        }
 
-		private void AddField(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
-		{
-			//we have a binary stored field, and it may be compressed
-			if (binary)
-			{
-				int toRead = fieldsStream.ReadVInt();
-				var b = new byte[toRead];
-				fieldsStream.ReadBytes(b, 0, b.Length);
-				doc.Add(compressed ? new Field(fi.name, Uncompress(b), Field.Store.YES) : new Field(fi.name, b, Field.Store.YES));
-			}
-			else
-			{
-				const Field.Store store = Field.Store.YES;
-				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
-				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
-				
-				AbstractField f;
-				if (compressed)
-				{
-					int toRead = fieldsStream.ReadVInt();
-					
-					var b = new byte[toRead];
-					fieldsStream.ReadBytes(b, 0, b.Length);
-					f = new Field(fi.name, false, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index,
-					              termVector) {OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
-				}
-				else
-				{
-					f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector)
-					    	{OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
-				}
+        private void AddField(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
+        {
+            //we have a binary stored field, and it may be compressed
+            if (binary)
+            {
+                int toRead = fieldsStream.ReadVInt();
+                var b = new byte[toRead];
+                fieldsStream.ReadBytes(b, 0, b.Length);
+                doc.Add(compressed ? new Field(fi.name, Uncompress(b), Field.Store.YES) : new Field(fi.name, b, Field.Store.YES));
+            }
+            else
+            {
+                const Field.Store store = Field.Store.YES;
+                Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
+                Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
+                
+                AbstractField f;
+                if (compressed)
+                {
+                    int toRead = fieldsStream.ReadVInt();
+                    
+                    var b = new byte[toRead];
+                    fieldsStream.ReadBytes(b, 0, b.Length);
+                    f = new Field(fi.name, false, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index,
+                                  termVector) {OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
+                }
+                else
+                {
+                    f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector)
+                            {OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
+                }
 
-				doc.Add(f);
-			}
-		}
-		
-		// Add the size of field as a byte[] containing the 4 bytes of the integer byte size (high order byte first; char = 2 bytes)
-		// Read just the size -- caller must skip the field content to continue reading fields
-		// Return the size in bytes or chars, depending on field type
-		private int AddFieldSize(Document doc, FieldInfo fi, bool binary, bool compressed)
-		{
-			int size = fieldsStream.ReadVInt(), bytesize = binary || compressed?size:2 * size;
-			var sizebytes = new byte[4];
-			sizebytes[0] = (byte) (Number.URShift(bytesize, 24));
-			sizebytes[1] = (byte) (Number.URShift(bytesize, 16));
-			sizebytes[2] = (byte) (Number.URShift(bytesize, 8));
-			sizebytes[3] = (byte) bytesize;
-			doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
-			return size;
-		}
-		
-		/// <summary> A Lazy implementation of Fieldable that defers loading of fields until asked for, instead of when the Document is
-		/// loaded.
-		/// </summary>
-		[Serializable]
-		private sealed class LazyField : AbstractField
-		{
-			private void  InitBlock(FieldsReader enclosingInstance)
-			{
-				this.Enclosing_Instance = enclosingInstance;
-			}
+                doc.Add(f);
+            }
+        }
+        
+        // Add the size of field as a byte[] containing the 4 bytes of the integer byte size (high order byte first; char = 2 bytes)
+        // Read just the size -- caller must skip the field content to continue reading fields
+        // Return the size in bytes or chars, depending on field type
+        private int AddFieldSize(Document doc, FieldInfo fi, bool binary, bool compressed)
+        {
+            int size = fieldsStream.ReadVInt(), bytesize = binary || compressed?size:2 * size;
+            var sizebytes = new byte[4];
+            sizebytes[0] = (byte) (Number.URShift(bytesize, 24));
+            sizebytes[1] = (byte) (Number.URShift(bytesize, 16));
+            sizebytes[2] = (byte) (Number.URShift(bytesize, 8));
+            sizebytes[3] = (byte) bytesize;
+            doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
+            return size;
+        }
+        
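
AddFieldSize publishes the size as four big-endian bytes, doubling char counts for text fields because a char occupies two bytes; Number.URShift is an unsigned right shift. The same encoding written with plain C# casts, using an assumed size:

    int bytesize = 70000; // hypothetical size in bytes
    var sizebytes = new byte[4];
    sizebytes[0] = (byte)((uint)bytesize >> 24); // high-order byte first
    sizebytes[1] = (byte)((uint)bytesize >> 16);
    sizebytes[2] = (byte)((uint)bytesize >> 8);
    sizebytes[3] = (byte)bytesize;
    // 70000 == 0x00011170, so sizebytes == { 0x00, 0x01, 0x11, 0x70 }
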
+        /// <summary> A Lazy implementation of Fieldable that defers loading of fields until asked for, instead of when the Document is
+        /// loaded.
+        /// </summary>
+        [Serializable]
+        private sealed class LazyField : AbstractField
+        {
+            private void  InitBlock(FieldsReader enclosingInstance)
+            {
+                this.Enclosing_Instance = enclosingInstance;
+            }
 
-			private FieldsReader Enclosing_Instance { get; set; }
+            private FieldsReader Enclosing_Instance { get; set; }
 
-			private int toRead;
-			private long pointer;
+            private int toRead;
+            private long pointer;
             [Obsolete("Only kept for backward-compatibility with <3.0 indexes. Will be removed in 4.0.")]
-		    private readonly Boolean isCompressed;
-			
-			public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, int toRead, long pointer, bool isBinary, bool isCompressed):base(name, store, Field.Index.NO, Field.TermVector.NO)
-			{
-				InitBlock(enclosingInstance);
-				this.toRead = toRead;
-				this.pointer = pointer;
-				this.internalIsBinary = isBinary;
-				if (isBinary)
-					internalBinaryLength = toRead;
-				lazy = true;
-			    this.isCompressed = isCompressed;
-			}
-			
-			public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, Field.Index index, Field.TermVector termVector, int toRead, long pointer, bool isBinary, bool isCompressed):base(name, store, index, termVector)
-			{
-				InitBlock(enclosingInstance);
-				this.toRead = toRead;
-				this.pointer = pointer;
-				this.internalIsBinary = isBinary;
-				if (isBinary)
-					internalBinaryLength = toRead;
-				lazy = true;
-			    this.isCompressed = isCompressed;
-			}
-			
-			private IndexInput GetFieldStream()
-			{
-				IndexInput localFieldsStream = Enclosing_Instance.fieldsStreamTL.Get();
-				if (localFieldsStream == null)
-				{
-					localFieldsStream = (IndexInput) Enclosing_Instance.cloneableFieldsStream.Clone();
-					Enclosing_Instance.fieldsStreamTL.Set(localFieldsStream);
-				}
-				return localFieldsStream;
-			}
+            private readonly Boolean isCompressed;
+            
+            public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, int toRead, long pointer, bool isBinary, bool isCompressed):base(name, store, Field.Index.NO, Field.TermVector.NO)
+            {
+                InitBlock(enclosingInstance);
+                this.toRead = toRead;
+                this.pointer = pointer;
+                this.internalIsBinary = isBinary;
+                if (isBinary)
+                    internalBinaryLength = toRead;
+                lazy = true;
+                this.isCompressed = isCompressed;
+            }
+            
+            public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, Field.Index index, Field.TermVector termVector, int toRead, long pointer, bool isBinary, bool isCompressed):base(name, store, index, termVector)
+            {
+                InitBlock(enclosingInstance);
+                this.toRead = toRead;
+                this.pointer = pointer;
+                this.internalIsBinary = isBinary;
+                if (isBinary)
+                    internalBinaryLength = toRead;
+                lazy = true;
+                this.isCompressed = isCompressed;
+            }
+            
+            private IndexInput GetFieldStream()
+            {
+                IndexInput localFieldsStream = Enclosing_Instance.fieldsStreamTL.Get();
+                if (localFieldsStream == null)
+                {
+                    localFieldsStream = (IndexInput) Enclosing_Instance.cloneableFieldsStream.Clone();
+                    Enclosing_Instance.fieldsStreamTL.Set(localFieldsStream);
+                }
+                return localFieldsStream;
+            }
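
GetFieldStream is the usual per-thread clone idiom: IndexInput clones are cheap and keep an independent file position, so each thread lazily caches its own clone in the CloseableThreadLocal instead of synchronizing on the shared stream. A sketch of the race it avoids (threads and positions are hypothetical):

    // Why not share fieldsStream directly? Seek and read are stateful:
    //   thread A: shared.Seek(posA);   thread B: shared.Seek(posB);
    //   thread A: shared.ReadVInt();   // may now read from posB, not posA
    // A per-thread clone gives each thread a private file pointer.
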
 
-		    /// <summary>The value of the field as a Reader, or null.  If null, the String value,
-		    /// binary value, or TokenStream value is used.  Exactly one of StringValue(), 
-		    /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
-		    /// </summary>
-		    public override TextReader ReaderValue
-		    {
-		        get
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            return null;
-		        }
-		    }
+            /// <summary>The value of the field as a Reader, or null.  If null, the String value,
+            /// binary value, or TokenStream value is used.  Exactly one of StringValue(), 
+            /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
+            /// </summary>
+            public override TextReader ReaderValue
+            {
+                get
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    return null;
+                }
+            }
 
-		    /// <summary>The value of the field as a TokenStream, or null.  If null, the Reader value,
-		    /// String value, or binary value is used. Exactly one of StringValue(), 
-		    /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
-		    /// </summary>
-		    public override TokenStream TokenStreamValue
-		    {
-		        get
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            return null;
-		        }
-		    }
+            /// <summary>The value of the field as a TokenStream, or null.  If null, the Reader value,
+            /// String value, or binary value is used. Exactly one of StringValue(), 
+            /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
+            /// </summary>
+            public override TokenStream TokenStreamValue
+            {
+                get
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    return null;
+                }
+            }
 
-		    /// <summary>The value of the field as a String, or null.  If null, the Reader value,
-		    /// binary value, or TokenStream value is used.  Exactly one of StringValue(), 
-		    /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
-		    /// </summary>
-		    public override string StringValue
-		    {
-		        get
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            if (internalIsBinary)
-		                return null;
+            /// <summary>The value of the field as a String, or null.  If null, the Reader value,
+            /// binary value, or TokenStream value is used.  Exactly one of StringValue(), 
+            /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
+            /// </summary>
+            public override string StringValue
+            {
+                get
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    if (internalIsBinary)
+                        return null;
 
-		        	if (fieldsData == null)
-		        	{
-		        		IndexInput localFieldsStream = GetFieldStream();
-		        		try
-		        		{
-		        			localFieldsStream.Seek(pointer);
-		        			if (isCompressed)
-		        			{
-		        				var b = new byte[toRead];
-		        				localFieldsStream.ReadBytes(b, 0, b.Length);
-		        				fieldsData =
-		        					System.Text.Encoding.GetEncoding("UTF-8").GetString(Enclosing_Instance.Uncompress(b));
-		        			}
-		        			else
-		        			{
-		        				if (Enclosing_Instance.format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
-		        				{
-		        					var bytes = new byte[toRead];
-		        					localFieldsStream.ReadBytes(bytes, 0, toRead);
-		        					fieldsData = System.Text.Encoding.GetEncoding("UTF-8").GetString(bytes);
-		        				}
-		        				else
-		        				{
-		        					//read in chars b/c we already know the length we need to read
-		        					var chars = new char[toRead];
-		        					localFieldsStream.ReadChars(chars, 0, toRead);
-		        					fieldsData = new System.String(chars);
-		        				}
-		        			}
-		        		}
-		        		catch (System.IO.IOException e)
-		        		{
-		        			throw new FieldReaderException(e);
-		        		}
-		        	}
-		        	return (System.String) fieldsData;
-		        }
-		    }
+                    if (fieldsData == null)
+                    {
+                        IndexInput localFieldsStream = GetFieldStream();
+                        try
+                        {
+                            localFieldsStream.Seek(pointer);
+                            if (isCompressed)
+                            {
+                                var b = new byte[toRead];
+                                localFieldsStream.ReadBytes(b, 0, b.Length);
+                                fieldsData =
+                                    System.Text.Encoding.GetEncoding("UTF-8").GetString(Enclosing_Instance.Uncompress(b));
+                            }
+                            else
+                            {
+                                if (Enclosing_Instance.format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
+                                {
+                                    var bytes = new byte[toRead];
+                                    localFieldsStream.ReadBytes(bytes, 0, toRead);
+                                    fieldsData = System.Text.Encoding.GetEncoding("UTF-8").GetString(bytes);
+                                }
+                                else
+                                {
+                                    //read in chars b/c we already know the length we need to read
+                                    var chars = new char[toRead];
+                                    localFieldsStream.ReadChars(chars, 0, toRead);
+                                    fieldsData = new System.String(chars);
+                                }
+                            }
+                        }
+                        catch (System.IO.IOException e)
+                        {
+                            throw new FieldReaderException(e);
+                        }
+                    }
+                    return (System.String) fieldsData;
+                }
+            }
 
-		    public long Pointer
-		    {
-		        get
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            return pointer;
-		        }
-		        set
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            this.pointer = value;
-		        }
-		    }
+            public long Pointer
+            {
+                get
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    return pointer;
+                }
+                set
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    this.pointer = value;
+                }
+            }
 
-		    public int ToRead
-		    {
-		        get
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            return toRead;
-		        }
-		        set
-		        {
-		            Enclosing_Instance.EnsureOpen();
-		            this.toRead = value;
-		        }
-		    }
+            public int ToRead
+            {
+                get
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    return toRead;
+                }
+                set
+                {
+                    Enclosing_Instance.EnsureOpen();
+                    this.toRead = value;
+                }
+            }
 
-		    public override byte[] GetBinaryValue(byte[] result)
-			{
-				Enclosing_Instance.EnsureOpen();
-				
-				if (internalIsBinary)
-				{
-					if (fieldsData == null)
-					{
-						// Allocate new buffer if result is null or too small
-						byte[] b;
-						if (result == null || result.Length < toRead)
-							b = new byte[toRead];
-						else
-							b = result;
-						
-						IndexInput localFieldsStream = GetFieldStream();
-						
-						// Throw this IOException since IndexReader.document does so anyway, so probably not that big of a change for people
-						// since they are already handling this exception when getting the document
-						try
-						{
-							localFieldsStream.Seek(pointer);
-							localFieldsStream.ReadBytes(b, 0, toRead);
-							fieldsData = isCompressed ? Enclosing_Instance.Uncompress(b) : b;
-						}
-						catch (IOException e)
-						{
-							throw new FieldReaderException(e);
-						}
-						
-						internalbinaryOffset = 0;
-						internalBinaryLength = toRead;
-					}
-					
-					return (byte[]) fieldsData;
-				}
-		    	return null;
-			}
-		}
-		
-		private byte[] Uncompress(byte[] b)
-		{
-			try
-			{
-				return CompressionTools.Decompress(b);
-			}
-			catch (Exception e)
-			{
-				// this will happen if the field is not compressed
-				throw new CorruptIndexException("field data are in wrong format: " + e, e);
-			}
-		}
-	}
+            public override byte[] GetBinaryValue(byte[] result)
+            {
+                Enclosing_Instance.EnsureOpen();
+                
+                if (internalIsBinary)
+                {
+                    if (fieldsData == null)
+                    {
+                        // Allocate new buffer if result is null or too small
+                        byte[] b;
+                        if (result == null || result.Length < toRead)
+                            b = new byte[toRead];
+                        else
+                            b = result;
+                        
+                        IndexInput localFieldsStream = GetFieldStream();
+                        
+                        // Throw this IOException since IndexReader.document does so anyway, so probably not that big of a change for people
+                        // since they are already handling this exception when getting the document
+                        try
+                        {
+                            localFieldsStream.Seek(pointer);
+                            localFieldsStream.ReadBytes(b, 0, toRead);
+                            fieldsData = isCompressed ? Enclosing_Instance.Uncompress(b) : b;
+                        }
+                        catch (IOException e)
+                        {
+                            throw new FieldReaderException(e);
+                        }
+                        
+                        internalbinaryOffset = 0;
+                        internalBinaryLength = toRead;
+                    }
+                    
+                    return (byte[]) fieldsData;
+                }
+                return null;
+            }
+        }
+        
+        private byte[] Uncompress(byte[] b)
+        {
+            try
+            {
+                return CompressionTools.Decompress(b);
+            }
+            catch (Exception e)
+            {
+                // this will happen if the field is not compressed
+                throw new CorruptIndexException("field data are in wrong format: " + e, e);
+            }
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/FieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/FieldsWriter.cs b/src/core/Index/FieldsWriter.cs
index 9244195..d34a662 100644
--- a/src/core/Index/FieldsWriter.cs
+++ b/src/core/Index/FieldsWriter.cs
@@ -26,265 +26,265 @@ using RAMOutputStream = Lucene.Net.Store.RAMOutputStream;
 
 namespace Lucene.Net.Index
 {
-	
-	sealed class FieldsWriter : IDisposable
-	{
-		internal const byte FIELD_IS_TOKENIZED = (0x1);
-		internal const byte FIELD_IS_BINARY = (0x2);
+    
+    sealed class FieldsWriter : IDisposable
+    {
+        internal const byte FIELD_IS_TOKENIZED = (0x1);
+        internal const byte FIELD_IS_BINARY = (0x2);
         [Obsolete("Kept for backwards-compatibility with <3.0 indexes; will be removed in 4.0")]
-		internal const byte FIELD_IS_COMPRESSED = (0x4);
-		
-		// Original format
-		internal const int FORMAT = 0;
-		
-		// Changed strings to UTF8
-		internal const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = 1;
+        internal const byte FIELD_IS_COMPRESSED = (0x4);
+        
+        // Original format
+        internal const int FORMAT = 0;
+        
+        // Changed strings to UTF8
+        internal const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = 1;
                  
         // Lucene 3.0: Removal of compressed fields
         internal static int FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS = 2;
-		
-		// NOTE: if you introduce a new format, make it 1 higher
-		// than the current one, and always change this if you
-		// switch to a new format!
+        
+        // NOTE: if you introduce a new format, make it 1 higher
+        // than the current one, and always change this if you
+        // switch to a new format!
         internal static readonly int FORMAT_CURRENT = FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS;
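
These constants form an ordered version ladder that FieldsReader compares against the header int read from .fdx. Condensed, the gating looks like this (restated from the reader code above, not new behavior):

    // format is the first int of the .fdx file; 0 means a pre-versioning header.
    bool tooNew = format > FieldsWriter.FORMAT_CURRENT;                // rejected with CorruptIndexException
    bool utf8ByteLengths =
        format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;    // >= 1: string lengths in bytes
    bool canReadRawDocs =
        format >= FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS; // >= 2: raw-doc bulk merges
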
-		
-		private readonly FieldInfos fieldInfos;
-		
-		private IndexOutput fieldsStream;
-		
-		private IndexOutput indexStream;
-		
-		private readonly bool doClose;
-		
-		internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
-		{
-			fieldInfos = fn;
-			
-			bool success = false;
-			String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
-			try
-			{
-				fieldsStream = d.CreateOutput(fieldsName);
-				fieldsStream.WriteInt(FORMAT_CURRENT);
-				success = true;
-			}
-			finally
-			{
-				if (!success)
-				{
-					try
-					{
-						Dispose();
-					}
-					catch (System.Exception)
-					{
-						// Suppress so we keep throwing the original exception
-					}
-					try
-					{
-						d.DeleteFile(fieldsName);
-					}
-					catch (System.Exception)
-					{
-						// Suppress so we keep throwing the original exception
-					}
-				}
-			}
-			
-			success = false;
-			String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
-			try
-			{
-				indexStream = d.CreateOutput(indexName);
-				indexStream.WriteInt(FORMAT_CURRENT);
-				success = true;
-			}
-			finally
-			{
-				if (!success)
-				{
-					try
-					{
-						Dispose();
-					}
-					catch (System.IO.IOException)
-					{
-					}
-					try
-					{
-						d.DeleteFile(fieldsName);
-					}
-					catch (System.Exception)
-					{
-						// Suppress so we keep throwing the original exception
-					}
-					try
-					{
-						d.DeleteFile(indexName);
-					}
-					catch (System.Exception)
-					{
-						// Suppress so we keep throwing the original exception
-					}
-				}
-			}
-			
-			doClose = true;
-		}
-		
-		internal FieldsWriter(IndexOutput fdx, IndexOutput fdt, FieldInfos fn)
-		{
-			fieldInfos = fn;
-			fieldsStream = fdt;
-			indexStream = fdx;
-			doClose = false;
-		}
-		
-		internal void  SetFieldsStream(IndexOutput stream)
-		{
-			this.fieldsStream = stream;
-		}
-		
-		// Writes the contents of buffer into the fields stream
-		// and adds a new entry for this document into the index
-		// stream.  This assumes the buffer was already written
-		// in the correct fields format.
-		internal void  FlushDocument(int numStoredFields, RAMOutputStream buffer)
-		{
-			indexStream.WriteLong(fieldsStream.FilePointer);
-			fieldsStream.WriteVInt(numStoredFields);
-			buffer.WriteTo(fieldsStream);
-		}
-		
-		internal void  SkipDocument()
-		{
-			indexStream.WriteLong(fieldsStream.FilePointer);
-			fieldsStream.WriteVInt(0);
-		}
-		
-		internal void  Flush()
-		{
-			indexStream.Flush();
-			fieldsStream.Flush();
-		}
-		
-		public void Dispose()
-		{
+        
+        private readonly FieldInfos fieldInfos;
+        
+        private IndexOutput fieldsStream;
+        
+        private IndexOutput indexStream;
+        
+        private readonly bool doClose;
+        
+        internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
+        {
+            fieldInfos = fn;
+            
+            bool success = false;
+            String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
+            try
+            {
+                fieldsStream = d.CreateOutput(fieldsName);
+                fieldsStream.WriteInt(FORMAT_CURRENT);
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    try
+                    {
+                        Dispose();
+                    }
+                    catch (System.Exception)
+                    {
+                        // Suppress so we keep throwing the original exception
+                    }
+                    try
+                    {
+                        d.DeleteFile(fieldsName);
+                    }
+                    catch (System.Exception)
+                    {
+                        // Suppress so we keep throwing the original exception
+                    }
+                }
+            }
+            
+            success = false;
+            String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
+            try
+            {
+                indexStream = d.CreateOutput(indexName);
+                indexStream.WriteInt(FORMAT_CURRENT);
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    try
+                    {
+                        Dispose();
+                    }
+                    catch (System.IO.IOException)
+                    {
+                    }
+                    try
+                    {
+                        d.DeleteFile(fieldsName);
+                    }
+                    catch (System.Exception)
+                    {
+                        // Suppress so we keep throwing the original exception
+                    }
+                    try
+                    {
+                        d.DeleteFile(indexName);
+                    }
+                    catch (System.Exception)
+                    {
+                        // Suppress so we keep throwing the original exception
+                    }
+                }
+            }
+            
+            doClose = true;
+        }
+        
+        internal FieldsWriter(IndexOutput fdx, IndexOutput fdt, FieldInfos fn)
+        {
+            fieldInfos = fn;
+            fieldsStream = fdt;
+            indexStream = fdx;
+            doClose = false;
+        }
+        
+        internal void  SetFieldsStream(IndexOutput stream)
+        {
+            this.fieldsStream = stream;
+        }
+        
+        // Writes the contents of buffer into the fields stream
+        // and adds a new entry for this document into the index
+        // stream.  This assumes the buffer was already written
+        // in the correct fields format.
+        internal void  FlushDocument(int numStoredFields, RAMOutputStream buffer)
+        {
+            indexStream.WriteLong(fieldsStream.FilePointer);
+            fieldsStream.WriteVInt(numStoredFields);
+            buffer.WriteTo(fieldsStream);
+        }
+        
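
FlushDocument keeps .fdx and .fdt in lock-step: each document contributes exactly one 8-byte pointer to .fdx (the .fdt position where the document begins), and the .fdt side starts with a VInt count of stored fields, which is what FieldsReader.Doc reads back first. For one document, with assumed positions:

    // Suppose fieldsStream.FilePointer == 1300 before this document.
    indexStream.WriteLong(1300);  // .fdx entry for this docID
    fieldsStream.WriteVInt(3);    // three stored fields follow in .fdt
    // buffer.WriteTo(fieldsStream) then appends the pre-encoded field bytes.
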
+        internal void  SkipDocument()
+        {
+            indexStream.WriteLong(fieldsStream.FilePointer);
+            fieldsStream.WriteVInt(0);
+        }
+        
+        internal void  Flush()
+        {
+            indexStream.Flush();
+            fieldsStream.Flush();
+        }
+        
+        public void Dispose()
+        {
             // Move to protected method if class becomes unsealed
-			if (doClose)
-			{
-				try
-				{
-					if (fieldsStream != null)
-					{
-						try
-						{
-							fieldsStream.Close();
-						}
-						finally
-						{
-							fieldsStream = null;
-						}
-					}
-				}
-				catch (System.IO.IOException)
-				{
-					try
-					{
-						if (indexStream != null)
-						{
-							try
-							{
-								indexStream.Close();
-							}
-							finally
-							{
-								indexStream = null;
-							}
-						}
-					}
-					catch (System.IO.IOException)
-					{
-						// Ignore so we throw only first IOException hit
-					}
-					throw;
-				}
-				finally
-				{
-					if (indexStream != null)
-					{
-						try
-						{
-							indexStream.Close();
-						}
-						finally
-						{
-							indexStream = null;
-						}
-					}
-				}
-			}
-		}
-		
-		internal void  WriteField(FieldInfo fi, IFieldable field)
-		{
-			fieldsStream.WriteVInt(fi.number);
-			byte bits = 0;
-			if (field.IsTokenized)
-				bits |= FieldsWriter.FIELD_IS_TOKENIZED;
-			if (field.IsBinary)
-				bits |= FieldsWriter.FIELD_IS_BINARY;
-			
-			fieldsStream.WriteByte(bits);
-			
-			// compression is disabled for the current field
-			if (field.IsBinary)
-			{
-				byte[] data = field.GetBinaryValue();
-				int len = field.BinaryLength;
-				int offset = field.BinaryOffset;
-					
-				fieldsStream.WriteVInt(len);
-				fieldsStream.WriteBytes(data, offset, len);
-			}
-			else
-			{
-				fieldsStream.WriteString(field.StringValue);
-			}
-		}
-		
-		/// <summary>Bulk write a contiguous series of documents.  The
-		/// lengths array is the length (in bytes) of each raw
-		/// document.  The stream IndexInput is the
-		/// fieldsStream from which we should bulk-copy all
-		/// bytes. 
-		/// </summary>
-		internal void  AddRawDocuments(IndexInput stream, int[] lengths, int numDocs)
-		{
-			long position = fieldsStream.FilePointer;
-			long start = position;
-			for (int i = 0; i < numDocs; i++)
-			{
-				indexStream.WriteLong(position);
-				position += lengths[i];
-			}
-			fieldsStream.CopyBytes(stream, position - start);
-			System.Diagnostics.Debug.Assert(fieldsStream.FilePointer == position);
-		}
-		
-		internal void  AddDocument(Document doc)
-		{
-			indexStream.WriteLong(fieldsStream.FilePointer);
+            if (doClose)
+            {
+                try
+                {
+                    if (fieldsStream != null)
+                    {
+                        try
+                        {
+                            fieldsStream.Close();
+                        }
+                        finally
+                        {
+                            fieldsStream = null;
+                        }
+                    }
+                }
+                catch (System.IO.IOException)
+                {
+                    try
+                    {
+                        if (indexStream != null)
+                        {
+                            try
+                            {
+                                indexStream.Close();
+                            }
+                            finally
+                            {
+                                indexStream = null;
+                            }
+                        }
+                    }
+                    catch (System.IO.IOException)
+                    {
+                        // Ignore so we throw only the first IOException hit
+                    }
+                    throw;
+                }
+                finally
+                {
+                    if (indexStream != null)
+                    {
+                        try
+                        {
+                            indexStream.Close();
+                        }
+                        finally
+                        {
+                            indexStream = null;
+                        }
+                    }
+                }
+            }
+        }
+        
+        internal void  WriteField(FieldInfo fi, IFieldable field)
+        {
+            fieldsStream.WriteVInt(fi.number);
+            byte bits = 0;
+            if (field.IsTokenized)
+                bits |= FieldsWriter.FIELD_IS_TOKENIZED;
+            if (field.IsBinary)
+                bits |= FieldsWriter.FIELD_IS_BINARY;
+            
+            fieldsStream.WriteByte(bits);
+            
+            // compression is disabled for the current field
+            if (field.IsBinary)
+            {
+                byte[] data = field.GetBinaryValue();
+                int len = field.BinaryLength;
+                int offset = field.BinaryOffset;
+                    
+                fieldsStream.WriteVInt(len);
+                fieldsStream.WriteBytes(data, offset, len);
+            }
+            else
+            {
+                fieldsStream.WriteString(field.StringValue);
+            }
+        }
+        
+        /// <summary>Bulk write a contiguous series of documents. The
+        /// lengths array holds the length (in bytes) of each raw
+        /// document, and the stream argument is the source
+        /// fieldsStream from which all bytes are bulk-copied.
+        /// </summary>
+        internal void  AddRawDocuments(IndexInput stream, int[] lengths, int numDocs)
+        {
+            long position = fieldsStream.FilePointer;
+            long start = position;
+            for (int i = 0; i < numDocs; i++)
+            {
+                indexStream.WriteLong(position);
+                position += lengths[i];
+            }
+            fieldsStream.CopyBytes(stream, position - start);
+            System.Diagnostics.Debug.Assert(fieldsStream.FilePointer == position);
+        }
+        
+        internal void  AddDocument(Document doc)
+        {
+            indexStream.WriteLong(fieldsStream.FilePointer);
 
-			System.Collections.Generic.IList<IFieldable> fields = doc.GetFields();
-			int storedCount = fields.Count(field => field.IsStored);
-			fieldsStream.WriteVInt(storedCount);
-			
-			foreach(IFieldable field in fields)
-			{
-				if (field.IsStored)
-					WriteField(fieldInfos.FieldInfo(field.Name), field);
-			}
-		}
-	}
+            System.Collections.Generic.IList<IFieldable> fields = doc.GetFields();
+            int storedCount = fields.Count(field => field.IsStored);
+            fieldsStream.WriteVInt(storedCount);
+            
+            foreach(IFieldable field in fields)
+            {
+                if (field.IsStored)
+                    WriteField(fieldInfos.FieldInfo(field.Name), field);
+            }
+        }
+    }
 }
\ No newline at end of file
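
A reading note on the per-field record that WriteField above produces: each stored field is a VInt field number, one flags byte, and then either a length-prefixed byte payload (binary fields) or a string. A minimal decoder sketch under those assumptions, with the flag values taken from FieldsWriter's constants and all error handling omitted:

    // Sketch only: decodes one field record in the layout WriteField emits.
    // The flag values mirror FieldsWriter's constants (0x1, 0x2); a real
    // FieldsReader also consults FieldInfos and the file-format version.
    internal static class StoredFieldSketch
    {
        public static void ReadOneField(Lucene.Net.Store.IndexInput fieldsStream)
        {
            int fieldNumber = fieldsStream.ReadVInt();    // fi.number comes first
            byte bits = fieldsStream.ReadByte();          // flags byte
            bool isTokenized = (bits & 0x1) != 0;         // FIELD_IS_TOKENIZED
            bool isBinary = (bits & 0x2) != 0;            // FIELD_IS_BINARY
            if (isBinary)
            {
                int len = fieldsStream.ReadVInt();        // length-prefixed bytes
                byte[] data = new byte[len];
                fieldsStream.ReadBytes(data, 0, len);
            }
            else
            {
                string value = fieldsStream.ReadString(); // VInt length + chars
            }
        }
    }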

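FlushDocument, SkipDocument, and AddDocument all start by writing fieldsStream.FilePointer into the index stream as a long, so the .fdx index is a flat array of 8-byte pointers into .fdt. A sketch of the docID-to-position lookup this layout enables (the format header and bounds checks of the real reader are left out):

    internal static class FieldsIndexSketch
    {
        // Sketch only: one 8-byte long per document means a docID maps to a
        // fixed offset in .fdx. The real FieldsReader additionally skips a
        // small format header at the front of the file.
        public static long FieldsPositionFor(Lucene.Net.Store.IndexInput indexStream,
                                             int docID, int docStoreOffset)
        {
            indexStream.Seek((docID + docStoreOffset) * 8L); // entry for docID
            return indexStream.ReadLong();                   // start offset in .fdt
        }
    }
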
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/FilterIndexReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/FilterIndexReader.cs b/src/core/Index/FilterIndexReader.cs
index dc61613..ced4220 100644
--- a/src/core/Index/FilterIndexReader.cs
+++ b/src/core/Index/FilterIndexReader.cs
@@ -23,37 +23,37 @@ using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary>A <c>FilterIndexReader</c> contains another IndexReader, which it
-	/// uses as its basic source of data, possibly transforming the data along the
-	/// way or providing additional functionality. The class
-	/// <c>FilterIndexReader</c> itself simply implements all abstract methods
-	/// of <c>IndexReader</c> with versions that pass all requests to the
-	/// contained index reader. Subclasses of <c>FilterIndexReader</c> may
-	/// further override some of these methods and may also provide additional
-	/// methods and fields.
-	/// </summary>
-	public class FilterIndexReader:IndexReader
-	{
+    
+    /// <summary>A <c>FilterIndexReader</c> contains another IndexReader, which it
+    /// uses as its basic source of data, possibly transforming the data along the
+    /// way or providing additional functionality. The class
+    /// <c>FilterIndexReader</c> itself simply implements all abstract methods
+    /// of <c>IndexReader</c> with versions that pass all requests to the
+    /// contained index reader. Subclasses of <c>FilterIndexReader</c> may
+    /// further override some of these methods and may also provide additional
+    /// methods and fields.
+    /// </summary>
+    public class FilterIndexReader:IndexReader
+    {
 
         /// <summary>Base class for filtering <see cref="Lucene.Net.Index.TermDocs" /> implementations. </summary>
-		public class FilterTermDocs : TermDocs
-		{
-			protected internal TermDocs in_Renamed;
-			
-			public FilterTermDocs(TermDocs in_Renamed)
-			{
-				this.in_Renamed = in_Renamed;
-			}
-			
-			public virtual void  Seek(Term term)
-			{
-				in_Renamed.Seek(term);
-			}
-			public virtual void  Seek(TermEnum termEnum)
-			{
-				in_Renamed.Seek(termEnum);
-			}
+        public class FilterTermDocs : TermDocs
+        {
+            protected internal TermDocs in_Renamed;
+            
+            public FilterTermDocs(TermDocs in_Renamed)
+            {
+                this.in_Renamed = in_Renamed;
+            }
+            
+            public virtual void  Seek(Term term)
+            {
+                in_Renamed.Seek(term);
+            }
+            public virtual void  Seek(TermEnum termEnum)
+            {
+                in_Renamed.Seek(termEnum);
+            }
 
             public virtual int Doc
             {
@@ -66,22 +66,22 @@ namespace Lucene.Net.Index
             }
 
             public virtual bool Next()
-			{
-				return in_Renamed.Next();
-			}
-			public virtual int Read(int[] docs, int[] freqs)
-			{
-				return in_Renamed.Read(docs, freqs);
-			}
-			public virtual bool SkipTo(int i)
-			{
-				return in_Renamed.SkipTo(i);
-			}
+            {
+                return in_Renamed.Next();
+            }
+            public virtual int Read(int[] docs, int[] freqs)
+            {
+                return in_Renamed.Read(docs, freqs);
+            }
+            public virtual bool SkipTo(int i)
+            {
+                return in_Renamed.SkipTo(i);
+            }
 
-			public void Close()
-			{
-				Dispose();
-			}
+            public void Close()
+            {
+                Dispose();
+            }
 
             public void Dispose()
             {
@@ -95,64 +95,64 @@ namespace Lucene.Net.Index
                     in_Renamed.Close();
                 }
             }
-		}
-		
-		/// <summary>Base class for filtering <see cref="TermPositions" /> implementations. </summary>
-		public class FilterTermPositions:FilterTermDocs, TermPositions
-		{
-			
-			public FilterTermPositions(TermPositions in_Renamed):base(in_Renamed)
-			{
-			}
-			
-			public virtual int NextPosition()
-			{
-				return ((TermPositions) this.in_Renamed).NextPosition();
-			}
+        }
+        
+        /// <summary>Base class for filtering <see cref="TermPositions" /> implementations. </summary>
+        public class FilterTermPositions:FilterTermDocs, TermPositions
+        {
+            
+            public FilterTermPositions(TermPositions in_Renamed):base(in_Renamed)
+            {
+            }
+            
+            public virtual int NextPosition()
+            {
+                return ((TermPositions) this.in_Renamed).NextPosition();
+            }
 
-		    public virtual int PayloadLength
-		    {
-		        get { return ((TermPositions) this.in_Renamed).PayloadLength; }
-		    }
+            public virtual int PayloadLength
+            {
+                get { return ((TermPositions) this.in_Renamed).PayloadLength; }
+            }
 
-		    public virtual byte[] GetPayload(byte[] data, int offset)
-			{
-				return ((TermPositions) this.in_Renamed).GetPayload(data, offset);
-			}
-			
-			
-			// TODO: Remove warning after API has been finalized
+            public virtual byte[] GetPayload(byte[] data, int offset)
+            {
+                return ((TermPositions) this.in_Renamed).GetPayload(data, offset);
+            }
+            
+            
+            // TODO: Remove warning after API has been finalized
 
-		    public virtual bool IsPayloadAvailable
-		    {
-		        get { return ((TermPositions) this.in_Renamed).IsPayloadAvailable; }
-		    }
-		}
-		
-		/// <summary>Base class for filtering <see cref="TermEnum" /> implementations. </summary>
-		public class FilterTermEnum:TermEnum
-		{
-			protected internal TermEnum in_Renamed;
-			
-			public FilterTermEnum(TermEnum in_Renamed)
-			{
-				this.in_Renamed = in_Renamed;
-			}
-			
-			public override bool Next()
-			{
-				return in_Renamed.Next();
-			}
+            public virtual bool IsPayloadAvailable
+            {
+                get { return ((TermPositions) this.in_Renamed).IsPayloadAvailable; }
+            }
+        }
+        
+        /// <summary>Base class for filtering <see cref="TermEnum" /> implementations. </summary>
+        public class FilterTermEnum:TermEnum
+        {
+            protected internal TermEnum in_Renamed;
+            
+            public FilterTermEnum(TermEnum in_Renamed)
+            {
+                this.in_Renamed = in_Renamed;
+            }
+            
+            public override bool Next()
+            {
+                return in_Renamed.Next();
+            }
 
-		    public override Term Term
-		    {
-		        get { return in_Renamed.Term; }
-		    }
+            public override Term Term
+            {
+                get { return in_Renamed.Term; }
+            }
 
-		    public override int DocFreq()
-			{
-				return in_Renamed.DocFreq();
-			}
+            public override int DocFreq()
+            {
+                return in_Renamed.DocFreq();
+            }
 
             protected override void Dispose(bool disposing)
             {
@@ -161,228 +161,228 @@ namespace Lucene.Net.Index
                     in_Renamed.Close();
                 }
             }
-		}
-		
-		protected internal IndexReader in_Renamed;
-		
-		/// <summary> <p/>Construct a FilterIndexReader based on the specified base reader.
-		/// Directory locking for delete, undeleteAll, and setNorm operations is
-		/// left to the base reader.<p/>
-		/// <p/>Note that base reader is closed if this FilterIndexReader is closed.<p/>
-		/// </summary>
-		///  <param name="in_Renamed">specified base reader.
-		/// </param>
-		public FilterIndexReader(IndexReader in_Renamed):base()
-		{
-			this.in_Renamed = in_Renamed;
-		}
-		
-		public override Directory Directory()
-		{
-			return in_Renamed.Directory();
-		}
-		
-		public override ITermFreqVector[] GetTermFreqVectors(int docNumber)
-		{
-			EnsureOpen();
-			return in_Renamed.GetTermFreqVectors(docNumber);
-		}
-		
-		public override ITermFreqVector GetTermFreqVector(int docNumber, System.String field)
-		{
-			EnsureOpen();
-			return in_Renamed.GetTermFreqVector(docNumber, field);
-		}
-		
-		
-		public override void  GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
-		{
-			EnsureOpen();
-			in_Renamed.GetTermFreqVector(docNumber, field, mapper);
-		}
-		
-		public override void  GetTermFreqVector(int docNumber, TermVectorMapper mapper)
-		{
-			EnsureOpen();
-			in_Renamed.GetTermFreqVector(docNumber, mapper);
-		}
+        }
+        
+        protected internal IndexReader in_Renamed;
+        
+        /// <summary>Construct a FilterIndexReader based on the specified base reader.
+        /// Directory locking for delete, undeleteAll, and setNorm operations is
+        /// left to the base reader.
+        /// <p/>Note that the base reader is closed if this FilterIndexReader is closed.
+        /// </summary>
+        /// <param name="in_Renamed">the specified base reader.
+        /// </param>
+        public FilterIndexReader(IndexReader in_Renamed):base()
+        {
+            this.in_Renamed = in_Renamed;
+        }
+        
+        public override Directory Directory()
+        {
+            return in_Renamed.Directory();
+        }
+        
+        public override ITermFreqVector[] GetTermFreqVectors(int docNumber)
+        {
+            EnsureOpen();
+            return in_Renamed.GetTermFreqVectors(docNumber);
+        }
+        
+        public override ITermFreqVector GetTermFreqVector(int docNumber, System.String field)
+        {
+            EnsureOpen();
+            return in_Renamed.GetTermFreqVector(docNumber, field);
+        }
+        
+        
+        public override void  GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
+        {
+            EnsureOpen();
+            in_Renamed.GetTermFreqVector(docNumber, field, mapper);
+        }
+        
+        public override void  GetTermFreqVector(int docNumber, TermVectorMapper mapper)
+        {
+            EnsureOpen();
+            in_Renamed.GetTermFreqVector(docNumber, mapper);
+        }
 
-	    public override int NumDocs()
-	    {
-	        // Don't call ensureOpen() here (it could affect performance)
-	        return in_Renamed.NumDocs();
-	    }
+        public override int NumDocs()
+        {
+            // Don't call ensureOpen() here (it could affect performance)
+            return in_Renamed.NumDocs();
+        }
 
-	    public override int MaxDoc
-	    {
-	        get
-	        {
-	            // Don't call ensureOpen() here (it could affect performance)
-	            return in_Renamed.MaxDoc;
-	        }
-	    }
+        public override int MaxDoc
+        {
+            get
+            {
+                // Don't call ensureOpen() here (it could affect performance)
+                return in_Renamed.MaxDoc;
+            }
+        }
 
-	    public override Document Document(int n, FieldSelector fieldSelector)
-		{
-			EnsureOpen();
-			return in_Renamed.Document(n, fieldSelector);
-		}
-		
-		public override bool IsDeleted(int n)
-		{
-			// Don't call ensureOpen() here (it could affect performance)
-			return in_Renamed.IsDeleted(n);
-		}
+        public override Document Document(int n, FieldSelector fieldSelector)
+        {
+            EnsureOpen();
+            return in_Renamed.Document(n, fieldSelector);
+        }
+        
+        public override bool IsDeleted(int n)
+        {
+            // Don't call ensureOpen() here (it could affect performance)
+            return in_Renamed.IsDeleted(n);
+        }
 
-	    public override bool HasDeletions
-	    {
-	        get
-	        {
-	            // Don't call ensureOpen() here (it could affect performance)
-	            return in_Renamed.HasDeletions;
-	        }
-	    }
+        public override bool HasDeletions
+        {
+            get
+            {
+                // Don't call ensureOpen() here (it could affect performance)
+                return in_Renamed.HasDeletions;
+            }
+        }
 
-	    protected internal override void  DoUndeleteAll()
-		{
-			in_Renamed.UndeleteAll();
-		}
-		
-		public override bool HasNorms(System.String field)
-		{
-			EnsureOpen();
-			return in_Renamed.HasNorms(field);
-		}
-		
-		public override byte[] Norms(System.String f)
-		{
-			EnsureOpen();
-			return in_Renamed.Norms(f);
-		}
-		
-		public override void  Norms(System.String f, byte[] bytes, int offset)
-		{
-			EnsureOpen();
-			in_Renamed.Norms(f, bytes, offset);
-		}
-		
-		protected internal override void  DoSetNorm(int d, System.String f, byte b)
-		{
-			in_Renamed.SetNorm(d, f, b);
-		}
-		
-		public override TermEnum Terms()
-		{
-			EnsureOpen();
-			return in_Renamed.Terms();
-		}
-		
-		public override TermEnum Terms(Term t)
-		{
-			EnsureOpen();
-			return in_Renamed.Terms(t);
-		}
-		
-		public override int DocFreq(Term t)
-		{
-			EnsureOpen();
-			return in_Renamed.DocFreq(t);
-		}
-		
-		public override TermDocs TermDocs()
-		{
-			EnsureOpen();
-			return in_Renamed.TermDocs();
-		}
-		
-		public override TermDocs TermDocs(Term term)
-		{
-			EnsureOpen();
-			return in_Renamed.TermDocs(term);
-		}
-		
-		public override TermPositions TermPositions()
-		{
-			EnsureOpen();
-			return in_Renamed.TermPositions();
-		}
-		
-		protected internal override void  DoDelete(int n)
-		{
-			in_Renamed.DeleteDocument(n);
-		}
+        protected internal override void  DoUndeleteAll()
+        {
+            in_Renamed.UndeleteAll();
+        }
+        
+        public override bool HasNorms(System.String field)
+        {
+            EnsureOpen();
+            return in_Renamed.HasNorms(field);
+        }
+        
+        public override byte[] Norms(System.String f)
+        {
+            EnsureOpen();
+            return in_Renamed.Norms(f);
+        }
+        
+        public override void  Norms(System.String f, byte[] bytes, int offset)
+        {
+            EnsureOpen();
+            in_Renamed.Norms(f, bytes, offset);
+        }
+        
+        protected internal override void  DoSetNorm(int d, System.String f, byte b)
+        {
+            in_Renamed.SetNorm(d, f, b);
+        }
+        
+        public override TermEnum Terms()
+        {
+            EnsureOpen();
+            return in_Renamed.Terms();
+        }
+        
+        public override TermEnum Terms(Term t)
+        {
+            EnsureOpen();
+            return in_Renamed.Terms(t);
+        }
+        
+        public override int DocFreq(Term t)
+        {
+            EnsureOpen();
+            return in_Renamed.DocFreq(t);
+        }
+        
+        public override TermDocs TermDocs()
+        {
+            EnsureOpen();
+            return in_Renamed.TermDocs();
+        }
+        
+        public override TermDocs TermDocs(Term term)
+        {
+            EnsureOpen();
+            return in_Renamed.TermDocs(term);
+        }
+        
+        public override TermPositions TermPositions()
+        {
+            EnsureOpen();
+            return in_Renamed.TermPositions();
+        }
+        
+        protected internal override void  DoDelete(int n)
+        {
+            in_Renamed.DeleteDocument(n);
+        }
 
         protected internal override void DoCommit(System.Collections.Generic.IDictionary<string, string> commitUserData)
-		{
-			in_Renamed.Commit(commitUserData);
-		}
-		
-		protected internal override void  DoClose()
-		{
-			in_Renamed.Close();
+        {
+            in_Renamed.Commit(commitUserData);
+        }
+        
+        protected internal override void  DoClose()
+        {
+            in_Renamed.Close();
             // NOTE: only needed in case someone had asked for
             // FieldCache for top-level reader (which is generally
             // not a good idea):
             Lucene.Net.Search.FieldCache_Fields.DEFAULT.Purge(this);
-		}
+        }
 
 
         public override System.Collections.Generic.ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames)
-		{
-			EnsureOpen();
-			return in_Renamed.GetFieldNames(fieldNames);
-		}
+        {
+            EnsureOpen();
+            return in_Renamed.GetFieldNames(fieldNames);
+        }
 
-	    public override long Version
-	    {
-	        get
-	        {
-	            EnsureOpen();
-	            return in_Renamed.Version;
-	        }
-	    }
+        public override long Version
+        {
+            get
+            {
+                EnsureOpen();
+                return in_Renamed.Version;
+            }
+        }
 
-	    public override bool IsCurrent()
-	    {
-	        EnsureOpen();
-	        return in_Renamed.IsCurrent();
-	    }
+        public override bool IsCurrent()
+        {
+            EnsureOpen();
+            return in_Renamed.IsCurrent();
+        }
 
-	    public override bool IsOptimized()
-	    {
-	        EnsureOpen();
-	        return in_Renamed.IsOptimized();
-	    }
+        public override bool IsOptimized()
+        {
+            EnsureOpen();
+            return in_Renamed.IsOptimized();
+        }
 
-	    public override IndexReader[] GetSequentialSubReaders()
-	    {
-	        return in_Renamed.GetSequentialSubReaders();
-	    }
+        public override IndexReader[] GetSequentialSubReaders()
+        {
+            return in_Renamed.GetSequentialSubReaders();
+        }
 
-	    override public System.Object Clone()
-		{
+        override public System.Object Clone()
+        {
             System.Diagnostics.Debug.Fail("Port issue:", "Let's see if we need this FilterIndexReader.Clone()"); // {{Aroush-2.9}}
-			return null;
-		}
+            return null;
+        }
 
-	    /// <summary>
-	    /// If the subclass of FilteredIndexReader modifies the
-	    /// contents of the FieldCache, you must override this
-	    /// method to provide a different key */
-	    ///</summary>
-	    public override object FieldCacheKey
-	    {
-	        get { return in_Renamed.FieldCacheKey; }
-	    }
+        /// <summary>
+        /// If the subclass of FilterIndexReader modifies the
+        /// contents of the FieldCache, you must override this
+        /// method to provide a different key.
+        /// </summary>
+        public override object FieldCacheKey
+        {
+            get { return in_Renamed.FieldCacheKey; }
+        }
 
-	    /// <summary>
-	    /// If the subclass of FilteredIndexReader modifies the
-	    /// deleted docs, you must override this method to provide
-	    /// a different key */
-	    /// </summary>
-	    public override object DeletesCacheKey
-	    {
-	        get { return in_Renamed.DeletesCacheKey; }
-	    }
-	}
+        /// <summary>
+        /// If the subclass of FilterIndexReader modifies the
+        /// deleted docs, you must override this method to provide
+        /// a different key.
+        /// </summary>
+        public override object DeletesCacheKey
+        {
+            get { return in_Renamed.DeletesCacheKey; }
+        }
+    }
 }
\ No newline at end of file
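
The pass-through methods above are the whole point of the class: subclasses forward everything and override only what they need. A hypothetical example of that pattern (the class name and counting behavior are illustrative, not part of this codebase):

    using Lucene.Net.Documents;
    using Lucene.Net.Index;

    // Hypothetical subclass: counts stored-document loads while delegating
    // all other IndexReader behavior to the wrapped reader.
    public class CountingIndexReader : FilterIndexReader
    {
        public int DocumentLoads;

        public CountingIndexReader(IndexReader inner) : base(inner)
        {
        }

        public override Document Document(int n, FieldSelector fieldSelector)
        {
            DocumentLoads++;                        // observe the call...
            return base.Document(n, fieldSelector); // ...then pass it through
        }
    }

Per the constructor summary, closing the wrapper also closes the inner reader, so only the outermost reader needs explicit disposal.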

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/FormatPostingsDocsConsumer.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/FormatPostingsDocsConsumer.cs b/src/core/Index/FormatPostingsDocsConsumer.cs
index 29c0558..74efb0d 100644
--- a/src/core/Index/FormatPostingsDocsConsumer.cs
+++ b/src/core/Index/FormatPostingsDocsConsumer.cs
@@ -19,18 +19,18 @@ using System;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary> NOTE: this API is experimental and will likely change</summary>
-	
-	abstract class FormatPostingsDocsConsumer
-	{
-		
-		/// <summary>Adds a new doc in this term.  If this returns null
-		/// then we just skip consuming positions/payloads. 
-		/// </summary>
-		internal abstract FormatPostingsPositionsConsumer AddDoc(int docID, int termDocFreq);
-		
-		/// <summary>Called when we are done adding docs to this term </summary>
-		internal abstract void  Finish();
-	}
+    
+    /// <summary> NOTE: this API is experimental and will likely change</summary>
+    
+    abstract class FormatPostingsDocsConsumer
+    {
+        
+        /// <summary>Adds a new doc in this term.  If this returns null
+        /// then we just skip consuming positions/payloads. 
+        /// </summary>
+        internal abstract FormatPostingsPositionsConsumer AddDoc(int docID, int termDocFreq);
+        
+        /// <summary>Called when we are done adding docs to this term </summary>
+        internal abstract void  Finish();
+    }
 }
\ No newline at end of file
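
The contract the two members above sketch out: one AddDoc call per document containing the term, Finish when the term ends, and a null return from AddDoc opts out of position/payload consumption. A trivial implementation sketch, assuming it sits inside the Lucene.Net.Index namespace since the type and its members are internal:

    namespace Lucene.Net.Index
    {
        // Sketch only: tallies how many documents carry the current term and
        // returns null from AddDoc so the caller skips positions/payloads.
        class DocCountingConsumer : FormatPostingsDocsConsumer
        {
            internal int docsForCurrentTerm;

            internal override FormatPostingsPositionsConsumer AddDoc(int docID, int termDocFreq)
            {
                docsForCurrentTerm++;
                return null; // null => skip consuming positions/payloads
            }

            internal override void Finish()
            {
                // per-term bookkeeping would go here before the next term
                docsForCurrentTerm = 0;
            }
        }
    }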