Posted to commits@lucenenet.apache.org by cc...@apache.org on 2013/04/03 19:39:49 UTC

[06/51] [partial] Mass convert mixed tabs to spaces

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/ParallelReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/ParallelReader.cs b/src/core/Index/ParallelReader.cs
index e0b4b04..34e7033 100644
--- a/src/core/Index/ParallelReader.cs
+++ b/src/core/Index/ParallelReader.cs
@@ -25,581 +25,581 @@ using FieldSelectorResult = Lucene.Net.Documents.FieldSelectorResult;
 
 namespace Lucene.Net.Index
 {
-	/// <summary>An IndexReader which reads multiple, parallel indexes.  Each index added
-	/// must have the same number of documents, but typically each contains
-	/// different fields.  Each document contains the union of the fields of all
-	/// documents with the same document number.  When searching, matches for a
-	/// query term are from the first index added that has the field.
-	/// 
-	/// <p/>This is useful, e.g., with collections that have large fields which
-	/// change rarely and small fields that change more frequently.  The smaller
-	/// fields may be re-indexed in a new index and both indexes may be searched
-	/// together.
-	/// 
-	/// <p/><strong>Warning:</strong> It is up to you to make sure all indexes
-	/// are created and modified the same way. For example, if you add
-	/// documents to one index, you need to add the same documents in the
-	/// same order to the other indexes. <em>Failure to do so will result in
-	/// undefined behavior</em>.
-	/// </summary>
-	public class ParallelReader:IndexReader, System.ICloneable
-	{
+    /// <summary>An IndexReader which reads multiple, parallel indexes.  Each index added
+    /// must have the same number of documents, but typically each contains
+    /// different fields.  Each document contains the union of the fields of all
+    /// documents with the same document number.  When searching, matches for a
+    /// query term are from the first index added that has the field.
+    /// 
+    /// <p/>This is useful, e.g., with collections that have large fields which
+    /// change rarely and small fields that change more frequently.  The smaller
+    /// fields may be re-indexed in a new index and both indexes may be searched
+    /// together.
+    /// 
+    /// <p/><strong>Warning:</strong> It is up to you to make sure all indexes
+    /// are created and modified the same way. For example, if you add
+    /// documents to one index, you need to add the same documents in the
+    /// same order to the other indexes. <em>Failure to do so will result in
+    /// undefined behavior</em>.
+    /// </summary>
+    public class ParallelReader:IndexReader, System.ICloneable
+    {
         private List<IndexReader> readers = new List<IndexReader>();
         private List<bool> decrefOnClose = new List<bool>(); // remember which subreaders to decRef on close
-		internal bool incRefReaders = false;
-		private SortedDictionary<string, IndexReader> fieldToReader = new SortedDictionary<string, IndexReader>();
-		private IDictionary<IndexReader, ICollection<string>> readerToFields = new HashMap<IndexReader, ICollection<string>>();
+        internal bool incRefReaders = false;
+        private SortedDictionary<string, IndexReader> fieldToReader = new SortedDictionary<string, IndexReader>();
+        private IDictionary<IndexReader, ICollection<string>> readerToFields = new HashMap<IndexReader, ICollection<string>>();
         private List<IndexReader> storedFieldReaders = new List<IndexReader>();
-		
-		private int maxDoc;
-		private int numDocs;
-		private bool hasDeletions;
-		
-		/// <summary>Construct a ParallelReader. 
-		/// <p/>Note that all subreaders are closed if this ParallelReader is closed.<p/>
-		/// </summary>
-		public ParallelReader():this(true)
-		{
-		}
-		
-		/// <summary>Construct a ParallelReader. </summary>
-		/// <param name="closeSubReaders">indicates whether the subreaders should be closed
-		/// when this ParallelReader is closed
-		/// </param>
-		public ParallelReader(bool closeSubReaders):base()
-		{
-			this.incRefReaders = !closeSubReaders;
-		}
-		
-		/// <summary>Add an IndexReader.</summary>
-		/// <throws>  IOException if there is a low-level IO error </throws>
-		public virtual void  Add(IndexReader reader)
-		{
-			EnsureOpen();
-			Add(reader, false);
-		}
-		
-		/// <summary>Add an IndexReader whose stored fields will not be returned.  This can
-		/// accelerate search when stored fields are only needed from a subset of
-		/// the IndexReaders.
-		/// 
-		/// </summary>
-		/// <throws>  IllegalArgumentException if not all indexes contain the same number </throws>
-		/// <summary>     of documents
-		/// </summary>
-		/// <throws>  IllegalArgumentException if not all indexes have the same value </throws>
-		/// <summary>     of <see cref="IndexReader.MaxDoc" />
-		/// </summary>
-		/// <throws>  IOException if there is a low-level IO error </throws>
-		public virtual void  Add(IndexReader reader, bool ignoreStoredFields)
-		{
-			
-			EnsureOpen();
-			if (readers.Count == 0)
-			{
-				this.maxDoc = reader.MaxDoc;
-				this.numDocs = reader.NumDocs();
-				this.hasDeletions = reader.HasDeletions;
-			}
-			
-			if (reader.MaxDoc != maxDoc)
-			// check compatibility
-				throw new System.ArgumentException("All readers must have same maxDoc: " + maxDoc + "!=" + reader.MaxDoc);
-			if (reader.NumDocs() != numDocs)
-				throw new System.ArgumentException("All readers must have same numDocs: " + numDocs + "!=" + reader.NumDocs());
-			
-			ICollection<string> fields = reader.GetFieldNames(IndexReader.FieldOption.ALL);
-			readerToFields[reader] = fields;
-			foreach(var field in fields)
-			{
-				// update fieldToReader map
+        
+        private int maxDoc;
+        private int numDocs;
+        private bool hasDeletions;
+        
+        /// <summary>Construct a ParallelReader. 
+        /// <p/>Note that all subreaders are closed if this ParallelReader is closed.<p/>
+        /// </summary>
+        public ParallelReader():this(true)
+        {
+        }
+        
+        /// <summary>Construct a ParallelReader. </summary>
+        /// <param name="closeSubReaders">indicates whether the subreaders should be closed
+        /// when this ParallelReader is closed
+        /// </param>
+        public ParallelReader(bool closeSubReaders):base()
+        {
+            this.incRefReaders = !closeSubReaders;
+        }
+        
+        /// <summary>Add an IndexReader.</summary>
+        /// <throws>  IOException if there is a low-level IO error </throws>
+        public virtual void  Add(IndexReader reader)
+        {
+            EnsureOpen();
+            Add(reader, false);
+        }
+        
+        /// <summary>Add an IndexReader whose stored fields will not be returned.  This can
+        /// accelerate search when stored fields are only needed from a subset of
+        /// the IndexReaders.
+        /// 
+        /// </summary>
+        /// <throws>  IllegalArgumentException if not all indexes contain the same number </throws>
+        /// <summary>     of documents
+        /// </summary>
+        /// <throws>  IllegalArgumentException if not all indexes have the same value </throws>
+        /// <summary>     of <see cref="IndexReader.MaxDoc" />
+        /// </summary>
+        /// <throws>  IOException if there is a low-level IO error </throws>
+        public virtual void  Add(IndexReader reader, bool ignoreStoredFields)
+        {
+            
+            EnsureOpen();
+            if (readers.Count == 0)
+            {
+                this.maxDoc = reader.MaxDoc;
+                this.numDocs = reader.NumDocs();
+                this.hasDeletions = reader.HasDeletions;
+            }
+            
+            if (reader.MaxDoc != maxDoc)
+            // check compatibility
+                throw new System.ArgumentException("All readers must have same maxDoc: " + maxDoc + "!=" + reader.MaxDoc);
+            if (reader.NumDocs() != numDocs)
+                throw new System.ArgumentException("All readers must have same numDocs: " + numDocs + "!=" + reader.NumDocs());
+            
+            ICollection<string> fields = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+            readerToFields[reader] = fields;
+            foreach(var field in fields)
+            {
+                // update fieldToReader map
                 // Do a ContainsKey first to mimic Java behavior
-				if (!fieldToReader.ContainsKey(field) || fieldToReader[field] == null)
-					fieldToReader[field] = reader;
-			}
-			
-			if (!ignoreStoredFields)
-				storedFieldReaders.Add(reader); // add to storedFieldReaders
-			readers.Add(reader);
-			
-			if (incRefReaders)
-			{
-				reader.IncRef();
-			}
-			decrefOnClose.Add(incRefReaders);
-		}
-		
-		public override System.Object Clone()
-		{
-			try
-			{
-				return DoReopen(true);
-			}
-			catch (System.Exception ex)
-			{
-				throw new System.SystemException(ex.Message, ex);
-			}
-		}
-		
-		/// <summary> Tries to reopen the subreaders.
-		/// <br/>
-		/// If one or more subreaders could be re-opened (i. e. subReader.reopen() 
-		/// returned a new instance != subReader), then a new ParallelReader instance 
-		/// is returned, otherwise this instance is returned.
-		/// <p/>
-		/// A re-opened instance might share one or more subreaders with the old 
-		/// instance. Index modification operations result in undefined behavior
-		/// when performed before the old instance is closed.
-		/// (see <see cref="IndexReader.Reopen()" />).
-		/// <p/>
-		/// If subreaders are shared, then the reference count of those
-		/// readers is increased to ensure that the subreaders remain open
-		/// until the last referring reader is closed.
-		/// 
-		/// </summary>
-		/// <throws>  CorruptIndexException if the index is corrupt </throws>
-		/// <throws>  IOException if there is a low-level IO error  </throws>
-		public override IndexReader Reopen()
-		{
-			lock (this)
-			{
-				return DoReopen(false);
-			}
-		}
-		
-		protected internal virtual IndexReader DoReopen(bool doClone)
-		{
-			EnsureOpen();
-			
-			bool reopened = false;
+                if (!fieldToReader.ContainsKey(field) || fieldToReader[field] == null)
+                    fieldToReader[field] = reader;
+            }
+            
+            if (!ignoreStoredFields)
+                storedFieldReaders.Add(reader); // add to storedFieldReaders
+            readers.Add(reader);
+            
+            if (incRefReaders)
+            {
+                reader.IncRef();
+            }
+            decrefOnClose.Add(incRefReaders);
+        }
+        
+        public override System.Object Clone()
+        {
+            try
+            {
+                return DoReopen(true);
+            }
+            catch (System.Exception ex)
+            {
+                throw new System.SystemException(ex.Message, ex);
+            }
+        }
+        
+        /// <summary> Tries to reopen the subreaders.
+        /// <br/>
+        /// If one or more subreaders could be re-opened (i. e. subReader.reopen() 
+        /// returned a new instance != subReader), then a new ParallelReader instance 
+        /// is returned, otherwise this instance is returned.
+        /// <p/>
+        /// A re-opened instance might share one or more subreaders with the old 
+        /// instance. Index modification operations result in undefined behavior
+        /// when performed before the old instance is closed.
+        /// (see <see cref="IndexReader.Reopen()" />).
+        /// <p/>
+        /// If subreaders are shared, then the reference count of those
+        /// readers is increased to ensure that the subreaders remain open
+        /// until the last referring reader is closed.
+        /// 
+        /// </summary>
+        /// <throws>  CorruptIndexException if the index is corrupt </throws>
+        /// <throws>  IOException if there is a low-level IO error  </throws>
+        public override IndexReader Reopen()
+        {
+            lock (this)
+            {
+                return DoReopen(false);
+            }
+        }
+        
+        protected internal virtual IndexReader DoReopen(bool doClone)
+        {
+            EnsureOpen();
+            
+            bool reopened = false;
             IList<IndexReader> newReaders = new List<IndexReader>();
-			
-			bool success = false;
-			
-			try
-			{
-				foreach(var oldReader in readers)
-				{
-					IndexReader newReader = null;
-					if (doClone)
-					{
-						newReader = (IndexReader) oldReader.Clone();
-					}
-					else
-					{
-						newReader = oldReader.Reopen();
-					}
-					newReaders.Add(newReader);
-					// if at least one of the subreaders was updated we remember that
-					// and return a new ParallelReader
-					if (newReader != oldReader)
-					{
-						reopened = true;
-					}
-				}
-				success = true;
-			}
-			finally
-			{
-				if (!success && reopened)
-				{
-					for (int i = 0; i < newReaders.Count; i++)
-					{
-						IndexReader r = newReaders[i];
-						if (r != readers[i])
-						{
-							try
-							{
-								r.Close();
-							}
-							catch (System.IO.IOException)
-							{
-								// keep going - we want to clean up as much as possible
-							}
-						}
-					}
-				}
-			}
-			
-			if (reopened)
-			{
+            
+            bool success = false;
+            
+            try
+            {
+                foreach(var oldReader in readers)
+                {
+                    IndexReader newReader = null;
+                    if (doClone)
+                    {
+                        newReader = (IndexReader) oldReader.Clone();
+                    }
+                    else
+                    {
+                        newReader = oldReader.Reopen();
+                    }
+                    newReaders.Add(newReader);
+                    // if at least one of the subreaders was updated we remember that
+                    // and return a new ParallelReader
+                    if (newReader != oldReader)
+                    {
+                        reopened = true;
+                    }
+                }
+                success = true;
+            }
+            finally
+            {
+                if (!success && reopened)
+                {
+                    for (int i = 0; i < newReaders.Count; i++)
+                    {
+                        IndexReader r = newReaders[i];
+                        if (r != readers[i])
+                        {
+                            try
+                            {
+                                r.Close();
+                            }
+                            catch (System.IO.IOException)
+                            {
+                                // keep going - we want to clean up as much as possible
+                            }
+                        }
+                    }
+                }
+            }
+            
+            if (reopened)
+            {
                 List<bool> newDecrefOnClose = new List<bool>();
-				ParallelReader pr = new ParallelReader();
-				for (int i = 0; i < readers.Count; i++)
-				{
-					IndexReader oldReader = readers[i];
-					IndexReader newReader = newReaders[i];
-					if (newReader == oldReader)
-					{
-						newDecrefOnClose.Add(true);
-						newReader.IncRef();
-					}
-					else
-					{
-						// this is a new subreader instance, so on close() we don't
-						// decRef but close it 
-						newDecrefOnClose.Add(false);
-					}
-					pr.Add(newReader, !storedFieldReaders.Contains(oldReader));
-				}
-				pr.decrefOnClose = newDecrefOnClose;
-				pr.incRefReaders = incRefReaders;
-				return pr;
-			}
-			else
-			{
-				// No subreader was refreshed
-				return this;
-			}
-		}
-
-
-	    public override int NumDocs()
-	    {
-	        // Don't call ensureOpen() here (it could affect performance)
-	        return numDocs;
-	    }
-
-	    public override int MaxDoc
-	    {
-	        get
-	        {
-	            // Don't call ensureOpen() here (it could affect performance)
-	            return maxDoc;
-	        }
-	    }
-
-	    public override bool HasDeletions
-	    {
-	        get
-	        {
-	            // Don't call ensureOpen() here (it could affect performance)
-	            return hasDeletions;
-	        }
-	    }
-
-	    // check first reader
-		public override bool IsDeleted(int n)
-		{
-			// Don't call ensureOpen() here (it could affect performance)
-			if (readers.Count > 0)
-				return readers[0].IsDeleted(n);
-			return false;
-		}
-		
-		// delete in all readers
-		protected internal override void  DoDelete(int n)
-		{
-			foreach(var reader in readers)
-			{
-				reader.DeleteDocument(n);
-			}
-			hasDeletions = true;
-		}
-		
-		// undeleteAll in all readers
-		protected internal override void  DoUndeleteAll()
-		{
-			foreach(var reader in readers)
-			{
-				reader.UndeleteAll();
-			}
-			hasDeletions = false;
-		}
-		
-		// append fields from storedFieldReaders
-		public override Document Document(int n, FieldSelector fieldSelector)
-		{
-			EnsureOpen();
-			Document result = new Document();
-			foreach(IndexReader reader in storedFieldReaders)
-			{
-				bool include = (fieldSelector == null);
-				if (!include)
-				{
-				    var fields = readerToFields[reader];
-					foreach(var field in fields)
-					{
+                ParallelReader pr = new ParallelReader();
+                for (int i = 0; i < readers.Count; i++)
+                {
+                    IndexReader oldReader = readers[i];
+                    IndexReader newReader = newReaders[i];
+                    if (newReader == oldReader)
+                    {
+                        newDecrefOnClose.Add(true);
+                        newReader.IncRef();
+                    }
+                    else
+                    {
+                        // this is a new subreader instance, so on close() we don't
+                        // decRef but close it 
+                        newDecrefOnClose.Add(false);
+                    }
+                    pr.Add(newReader, !storedFieldReaders.Contains(oldReader));
+                }
+                pr.decrefOnClose = newDecrefOnClose;
+                pr.incRefReaders = incRefReaders;
+                return pr;
+            }
+            else
+            {
+                // No subreader was refreshed
+                return this;
+            }
+        }
+
+
+        public override int NumDocs()
+        {
+            // Don't call ensureOpen() here (it could affect performance)
+            return numDocs;
+        }
+
+        public override int MaxDoc
+        {
+            get
+            {
+                // Don't call ensureOpen() here (it could affect performance)
+                return maxDoc;
+            }
+        }
+
+        public override bool HasDeletions
+        {
+            get
+            {
+                // Don't call ensureOpen() here (it could affect performance)
+                return hasDeletions;
+            }
+        }
+
+        // check first reader
+        public override bool IsDeleted(int n)
+        {
+            // Don't call ensureOpen() here (it could affect performance)
+            if (readers.Count > 0)
+                return readers[0].IsDeleted(n);
+            return false;
+        }
+        
+        // delete in all readers
+        protected internal override void  DoDelete(int n)
+        {
+            foreach(var reader in readers)
+            {
+                reader.DeleteDocument(n);
+            }
+            hasDeletions = true;
+        }
+        
+        // undeleteAll in all readers
+        protected internal override void  DoUndeleteAll()
+        {
+            foreach(var reader in readers)
+            {
+                reader.UndeleteAll();
+            }
+            hasDeletions = false;
+        }
+        
+        // append fields from storedFieldReaders
+        public override Document Document(int n, FieldSelector fieldSelector)
+        {
+            EnsureOpen();
+            Document result = new Document();
+            foreach(IndexReader reader in storedFieldReaders)
+            {
+                bool include = (fieldSelector == null);
+                if (!include)
+                {
+                    var fields = readerToFields[reader];
+                    foreach(var field in fields)
+                    {
                         if (fieldSelector.Accept(field) != FieldSelectorResult.NO_LOAD)
-						{
-							include = true;
-							break;
-						}
-					}
-				}
-				if (include)
-				{
-				    var fields = reader.Document(n, fieldSelector).GetFields();
-					foreach(var field in fields)
-					{
+                        {
+                            include = true;
+                            break;
+                        }
+                    }
+                }
+                if (include)
+                {
+                    var fields = reader.Document(n, fieldSelector).GetFields();
+                    foreach(var field in fields)
+                    {
                         result.Add(field);
-					}
-				}
-			}
-			return result;
-		}
-		
-		// get all vectors
-		public override ITermFreqVector[] GetTermFreqVectors(int n)
-		{
-			EnsureOpen();
-			IList<ITermFreqVector> results = new List<ITermFreqVector>();
+                    }
+                }
+            }
+            return result;
+        }
+        
+        // get all vectors
+        public override ITermFreqVector[] GetTermFreqVectors(int n)
+        {
+            EnsureOpen();
+            IList<ITermFreqVector> results = new List<ITermFreqVector>();
             foreach(var e in fieldToReader)
-			{
-				System.String field = e.Key;
-				IndexReader reader = e.Value;
-
-				ITermFreqVector vector = reader.GetTermFreqVector(n, field);
-				if (vector != null)
-					results.Add(vector);
-			}
-			return results.ToArray();
-		}
-		
-		public override ITermFreqVector GetTermFreqVector(int n, System.String field)
-		{
-			EnsureOpen();
-			IndexReader reader = (fieldToReader[field]);
-			return reader == null?null:reader.GetTermFreqVector(n, field);
-		}
-		
-		
-		public override void  GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
-		{
-			EnsureOpen();
-			IndexReader reader = (fieldToReader[field]);
-			if (reader != null)
-			{
-				reader.GetTermFreqVector(docNumber, field, mapper);
-			}
-		}
-		
-		public override void  GetTermFreqVector(int docNumber, TermVectorMapper mapper)
-		{
-			EnsureOpen();
+            {
+                System.String field = e.Key;
+                IndexReader reader = e.Value;
+
+                ITermFreqVector vector = reader.GetTermFreqVector(n, field);
+                if (vector != null)
+                    results.Add(vector);
+            }
+            return results.ToArray();
+        }
+        
+        public override ITermFreqVector GetTermFreqVector(int n, System.String field)
+        {
+            EnsureOpen();
+            IndexReader reader = (fieldToReader[field]);
+            return reader == null?null:reader.GetTermFreqVector(n, field);
+        }
+        
+        
+        public override void  GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
+        {
+            EnsureOpen();
+            IndexReader reader = (fieldToReader[field]);
+            if (reader != null)
+            {
+                reader.GetTermFreqVector(docNumber, field, mapper);
+            }
+        }
+        
+        public override void  GetTermFreqVector(int docNumber, TermVectorMapper mapper)
+        {
+            EnsureOpen();
 
             foreach(var e in fieldToReader)
-			{
-				System.String field = e.Key;
-				IndexReader reader = e.Value;
-				reader.GetTermFreqVector(docNumber, field, mapper);
-			}
-		}
-		
-		public override bool HasNorms(System.String field)
-		{
-			EnsureOpen();
-			IndexReader reader = fieldToReader[field];
-		    return reader != null && reader.HasNorms(field);
-		}
-		
-		public override byte[] Norms(System.String field)
-		{
-			EnsureOpen();
-			IndexReader reader = fieldToReader[field];
-			return reader == null?null:reader.Norms(field);
-		}
-		
-		public override void  Norms(System.String field, byte[] result, int offset)
-		{
-			EnsureOpen();
-			IndexReader reader = fieldToReader[field];
-			if (reader != null)
-				reader.Norms(field, result, offset);
-		}
-		
-		protected internal override void  DoSetNorm(int n, System.String field, byte value_Renamed)
-		{
-			IndexReader reader = fieldToReader[field];
-			if (reader != null)
-				reader.DoSetNorm(n, field, value_Renamed);
-		}
-		
-		public override TermEnum Terms()
-		{
-			EnsureOpen();
-			return new ParallelTermEnum(this);
-		}
-		
-		public override TermEnum Terms(Term term)
-		{
-			EnsureOpen();
-			return new ParallelTermEnum(this, term);
-		}
-		
-		public override int DocFreq(Term term)
-		{
-			EnsureOpen();
-			IndexReader reader = fieldToReader[term.Field];
-			return reader == null?0:reader.DocFreq(term);
-		}
-		
-		public override TermDocs TermDocs(Term term)
-		{
-			EnsureOpen();
-			return new ParallelTermDocs(this, term);
-		}
-		
-		public override TermDocs TermDocs()
-		{
-			EnsureOpen();
-			return new ParallelTermDocs(this);
-		}
-		
-		public override TermPositions TermPositions(Term term)
-		{
-			EnsureOpen();
-			return new ParallelTermPositions(this, term);
-		}
-		
-		public override TermPositions TermPositions()
-		{
-			EnsureOpen();
-			return new ParallelTermPositions(this);
-		}
-
-	    /// <summary> Checks recursively if all subreaders are up to date. </summary>
-	    public override bool IsCurrent()
-	    {
-	        foreach (var reader in readers)
-	        {
-	            if (!reader.IsCurrent())
-	            {
-	                return false;
-	            }
-	        }
-
-	        // all subreaders are up to date
-	        return true;
-	    }
-
-	    /// <summary> Checks recursively if all subindexes are optimized </summary>
-	    public override bool IsOptimized()
-	    {
-	        foreach (var reader in readers)
-	        {
-	            if (!reader.IsOptimized())
-	            {
-	                return false;
-	            }
-	        }
-
-	        // all subindexes are optimized
-	        return true;
-	    }
-
-
-	    /// <summary>Not implemented.</summary>
-	    /// <throws>  UnsupportedOperationException </throws>
-	    public override long Version
-	    {
-	        get { throw new System.NotSupportedException("ParallelReader does not support this method."); }
-	    }
-
-	    // for testing
-		public /*internal*/ virtual IndexReader[] GetSubReaders()
-		{
-			return readers.ToArray();
-		}
+            {
+                System.String field = e.Key;
+                IndexReader reader = e.Value;
+                reader.GetTermFreqVector(docNumber, field, mapper);
+            }
+        }
+        
+        public override bool HasNorms(System.String field)
+        {
+            EnsureOpen();
+            IndexReader reader = fieldToReader[field];
+            return reader != null && reader.HasNorms(field);
+        }
+        
+        public override byte[] Norms(System.String field)
+        {
+            EnsureOpen();
+            IndexReader reader = fieldToReader[field];
+            return reader == null?null:reader.Norms(field);
+        }
+        
+        public override void  Norms(System.String field, byte[] result, int offset)
+        {
+            EnsureOpen();
+            IndexReader reader = fieldToReader[field];
+            if (reader != null)
+                reader.Norms(field, result, offset);
+        }
+        
+        protected internal override void  DoSetNorm(int n, System.String field, byte value_Renamed)
+        {
+            IndexReader reader = fieldToReader[field];
+            if (reader != null)
+                reader.DoSetNorm(n, field, value_Renamed);
+        }
+        
+        public override TermEnum Terms()
+        {
+            EnsureOpen();
+            return new ParallelTermEnum(this);
+        }
+        
+        public override TermEnum Terms(Term term)
+        {
+            EnsureOpen();
+            return new ParallelTermEnum(this, term);
+        }
+        
+        public override int DocFreq(Term term)
+        {
+            EnsureOpen();
+            IndexReader reader = fieldToReader[term.Field];
+            return reader == null?0:reader.DocFreq(term);
+        }
+        
+        public override TermDocs TermDocs(Term term)
+        {
+            EnsureOpen();
+            return new ParallelTermDocs(this, term);
+        }
+        
+        public override TermDocs TermDocs()
+        {
+            EnsureOpen();
+            return new ParallelTermDocs(this);
+        }
+        
+        public override TermPositions TermPositions(Term term)
+        {
+            EnsureOpen();
+            return new ParallelTermPositions(this, term);
+        }
+        
+        public override TermPositions TermPositions()
+        {
+            EnsureOpen();
+            return new ParallelTermPositions(this);
+        }
+
+        /// <summary> Checks recursively if all subreaders are up to date. </summary>
+        public override bool IsCurrent()
+        {
+            foreach (var reader in readers)
+            {
+                if (!reader.IsCurrent())
+                {
+                    return false;
+                }
+            }
+
+            // all subreaders are up to date
+            return true;
+        }
+
+        /// <summary> Checks recursively if all subindexes are optimized </summary>
+        public override bool IsOptimized()
+        {
+            foreach (var reader in readers)
+            {
+                if (!reader.IsOptimized())
+                {
+                    return false;
+                }
+            }
+
+            // all subindexes are optimized
+            return true;
+        }
+
+
+        /// <summary>Not implemented.</summary>
+        /// <throws>  UnsupportedOperationException </throws>
+        public override long Version
+        {
+            get { throw new System.NotSupportedException("ParallelReader does not support this method."); }
+        }
+
+        // for testing
+        public /*internal*/ virtual IndexReader[] GetSubReaders()
+        {
+            return readers.ToArray();
+        }
 
         protected internal override void DoCommit(IDictionary<string, string> commitUserData)
-		{
-			foreach(var reader in readers)
-				reader.Commit(commitUserData);
-		}
-		
-		protected internal override void  DoClose()
-		{
-			lock (this)
-			{
-				for (int i = 0; i < readers.Count; i++)
-				{
-					if (decrefOnClose[i])
-					{
-						readers[i].DecRef();
-					}
-					else
-					{
-						readers[i].Close();
-					}
-				}
-			}
+        {
+            foreach(var reader in readers)
+                reader.Commit(commitUserData);
+        }
+        
+        protected internal override void  DoClose()
+        {
+            lock (this)
+            {
+                for (int i = 0; i < readers.Count; i++)
+                {
+                    if (decrefOnClose[i])
+                    {
+                        readers[i].DecRef();
+                    }
+                    else
+                    {
+                        readers[i].Close();
+                    }
+                }
+            }
 
             Lucene.Net.Search.FieldCache_Fields.DEFAULT.Purge(this);
-		}
+        }
 
         public override System.Collections.Generic.ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames)
-		{
-			EnsureOpen();
+        {
+            EnsureOpen();
             ISet<string> fieldSet = Lucene.Net.Support.Compatibility.SetFactory.CreateHashSet<string>();
-			foreach(var reader in readers)
-			{
-				ICollection<string> names = reader.GetFieldNames(fieldNames);
+            foreach(var reader in readers)
+            {
+                ICollection<string> names = reader.GetFieldNames(fieldNames);
                 fieldSet.UnionWith(names);
-			}
-			return fieldSet;
-		}
-		
-		private class ParallelTermEnum : TermEnum
-		{
-			private void  InitBlock(ParallelReader enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private ParallelReader enclosingInstance;
-			public ParallelReader Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			private System.String field;
-			private IEnumerator<string> fieldIterator;
-			private TermEnum termEnum;
-
-		    private bool isDisposed;
-			
-			public ParallelTermEnum(ParallelReader enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-				try
-				{
-					field = Enclosing_Instance.fieldToReader.Keys.First();
-				}
-				catch (ArgumentOutOfRangeException)
-				{
-					// No fields, so keep field == null, termEnum == null
-					return;
-				}
-				if (field != null)
-					termEnum = Enclosing_Instance.fieldToReader[field].Terms();
-			}
-			
-			public ParallelTermEnum(ParallelReader enclosingInstance, Term term)
-			{
-				InitBlock(enclosingInstance);
-				field = term.Field;
-				IndexReader reader = Enclosing_Instance.fieldToReader[field];
-				if (reader != null)
-					termEnum = reader.Terms(term);
-			}
-			
-			public override bool Next()
-			{
-				if (termEnum == null)
-					return false;
-				
-				// another term in this field?
-				if (termEnum.Next() && (System.Object) termEnum.Term.Field == (System.Object) field)
-					return true; // yes, keep going
-				
-				termEnum.Close(); // close old termEnum
-				
-				// find the next field with terms, if any
-				if (fieldIterator == null)
-				{
+            }
+            return fieldSet;
+        }
+        
+        private class ParallelTermEnum : TermEnum
+        {
+            private void  InitBlock(ParallelReader enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private ParallelReader enclosingInstance;
+            public ParallelReader Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+                
+            }
+            private System.String field;
+            private IEnumerator<string> fieldIterator;
+            private TermEnum termEnum;
+
+            private bool isDisposed;
+            
+            public ParallelTermEnum(ParallelReader enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+                try
+                {
+                    field = Enclosing_Instance.fieldToReader.Keys.First();
+                }
+                catch (ArgumentOutOfRangeException)
+                {
+                    // No fields, so keep field == null, termEnum == null
+                    return;
+                }
+                if (field != null)
+                    termEnum = Enclosing_Instance.fieldToReader[field].Terms();
+            }
+            
+            public ParallelTermEnum(ParallelReader enclosingInstance, Term term)
+            {
+                InitBlock(enclosingInstance);
+                field = term.Field;
+                IndexReader reader = Enclosing_Instance.fieldToReader[field];
+                if (reader != null)
+                    termEnum = reader.Terms(term);
+            }
+            
+            public override bool Next()
+            {
+                if (termEnum == null)
+                    return false;
+                
+                // another term in this field?
+                if (termEnum.Next() && (System.Object) termEnum.Term.Field == (System.Object) field)
+                    return true; // yes, keep going
+                
+                termEnum.Close(); // close old termEnum
+                
+                // find the next field with terms, if any
+                if (fieldIterator == null)
+                {
                     var newList = new List<string>();  
                     if (Enclosing_Instance.fieldToReader != null && Enclosing_Instance.fieldToReader.Count > 0)
                     {
@@ -609,39 +609,39 @@ namespace Lucene.Net.Index
                     }
 
                     fieldIterator = newList.Skip(1).GetEnumerator(); // Skip field to get next one
-				}
-				while (fieldIterator.MoveNext())
-				{
-					field = fieldIterator.Current;
-					termEnum = Enclosing_Instance.fieldToReader[field].Terms(new Term(field));
-					Term term = termEnum.Term;
-					if (term != null && (System.Object) term.Field == (System.Object) field)
-						return true;
-					else
-						termEnum.Close();
-				}
-				
-				return false; // no more fields
-			}
-
-		    public override Term Term
-		    {
-		        get
-		        {
-		            if (termEnum == null)
-		                return null;
-
-		            return termEnum.Term;
-		        }
-		    }
-
-		    public override int DocFreq()
-			{
-				if (termEnum == null)
-					return 0;
-				
-				return termEnum.DocFreq();
-			}
+                }
+                while (fieldIterator.MoveNext())
+                {
+                    field = fieldIterator.Current;
+                    termEnum = Enclosing_Instance.fieldToReader[field].Terms(new Term(field));
+                    Term term = termEnum.Term;
+                    if (term != null && (System.Object) term.Field == (System.Object) field)
+                        return true;
+                    else
+                        termEnum.Close();
+                }
+                
+                return false; // no more fields
+            }
+
+            public override Term Term
+            {
+                get
+                {
+                    if (termEnum == null)
+                        return null;
+
+                    return termEnum.Term;
+                }
+            }
+
+            public override int DocFreq()
+            {
+                if (termEnum == null)
+                    return 0;
+                
+                return termEnum.DocFreq();
+            }
 
             protected override void Dispose(bool disposing)
             {
@@ -655,87 +655,87 @@ namespace Lucene.Net.Index
 
                 isDisposed = true;
             }
-		}
-		
-		// wrap a TermDocs in order to support seek(Term)
-		private class ParallelTermDocs : TermDocs
-		{
-			private void  InitBlock(ParallelReader enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private ParallelReader enclosingInstance;
-			public ParallelReader Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			protected internal TermDocs termDocs;
-
-		    private bool isDisposed;
-			
-			public ParallelTermDocs(ParallelReader enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			public ParallelTermDocs(ParallelReader enclosingInstance, Term term)
-			{
-				InitBlock(enclosingInstance);
+        }
+        
+        // wrap a TermDocs in order to support seek(Term)
+        private class ParallelTermDocs : TermDocs
+        {
+            private void  InitBlock(ParallelReader enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private ParallelReader enclosingInstance;
+            public ParallelReader Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+                
+            }
+            protected internal TermDocs termDocs;
+
+            private bool isDisposed;
+            
+            public ParallelTermDocs(ParallelReader enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+            }
+            public ParallelTermDocs(ParallelReader enclosingInstance, Term term)
+            {
+                InitBlock(enclosingInstance);
                 if(term == null)
                     termDocs = (Enclosing_Instance.readers.Count == 0)
                                    ? null
                                    : Enclosing_Instance.readers[0].TermDocs(null);
                 else
                     Seek(term);
-			}
-
-		    public virtual int Doc
-		    {
-		        get { return termDocs.Doc; }
-		    }
-
-		    public virtual int Freq
-		    {
-		        get { return termDocs.Freq; }
-		    }
-
-		    public virtual void  Seek(Term term)
-			{
-				IndexReader reader = Enclosing_Instance.fieldToReader[term.Field];
-				termDocs = reader != null?reader.TermDocs(term):null;
-			}
-			
-			public virtual void  Seek(TermEnum termEnum)
-			{
-				Seek(termEnum.Term);
-			}
-			
-			public virtual bool Next()
-			{
-				if (termDocs == null)
-					return false;
-				
-				return termDocs.Next();
-			}
-			
-			public virtual int Read(int[] docs, int[] freqs)
-			{
-				if (termDocs == null)
-					return 0;
-				
-				return termDocs.Read(docs, freqs);
-			}
-			
-			public virtual bool SkipTo(int target)
-			{
-				if (termDocs == null)
-					return false;
-				
-				return termDocs.SkipTo(target);
-			}
+            }
+
+            public virtual int Doc
+            {
+                get { return termDocs.Doc; }
+            }
+
+            public virtual int Freq
+            {
+                get { return termDocs.Freq; }
+            }
+
+            public virtual void  Seek(Term term)
+            {
+                IndexReader reader = Enclosing_Instance.fieldToReader[term.Field];
+                termDocs = reader != null?reader.TermDocs(term):null;
+            }
+            
+            public virtual void  Seek(TermEnum termEnum)
+            {
+                Seek(termEnum.Term);
+            }
+            
+            public virtual bool Next()
+            {
+                if (termDocs == null)
+                    return false;
+                
+                return termDocs.Next();
+            }
+            
+            public virtual int Read(int[] docs, int[] freqs)
+            {
+                if (termDocs == null)
+                    return 0;
+                
+                return termDocs.Read(docs, freqs);
+            }
+            
+            public virtual bool SkipTo(int target)
+            {
+                if (termDocs == null)
+                    return false;
+                
+                return termDocs.SkipTo(target);
+            }
 
             [Obsolete("Use Dispose() instead")]
             public virtual void Close()
@@ -743,10 +743,10 @@ namespace Lucene.Net.Index
                 Dispose();
             }
 
-		    public void Dispose()
-		    {
-		        Dispose(true);
-		    }
+            public void Dispose()
+            {
+                Dispose(true);
+            }
 
             protected virtual void Dispose(bool disposing)
             {
@@ -760,63 +760,63 @@ namespace Lucene.Net.Index
 
                 isDisposed = true;
             }
-		}
-		
-		private class ParallelTermPositions:ParallelTermDocs, TermPositions
-		{
-			private void  InitBlock(ParallelReader enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private ParallelReader enclosingInstance;
-			public new ParallelReader Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			
-			public ParallelTermPositions(ParallelReader enclosingInstance):base(enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			public ParallelTermPositions(ParallelReader enclosingInstance, Term term):base(enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-				Seek(term);
-			}
-			
-			public override void  Seek(Term term)
-			{
-				IndexReader reader = Enclosing_Instance.fieldToReader[term.Field];
-				termDocs = reader != null?reader.TermPositions(term):null;
-			}
-			
-			public virtual int NextPosition()
-			{
-				// It is an error to call this if there is no next position, e.g. if termDocs==null
-				return ((TermPositions) termDocs).NextPosition();
-			}
-
-		    public virtual int PayloadLength
-		    {
-		        get { return ((TermPositions) termDocs).PayloadLength; }
-		    }
-
-		    public virtual byte[] GetPayload(byte[] data, int offset)
-			{
-				return ((TermPositions) termDocs).GetPayload(data, offset);
-			}
-			
-			
-			// TODO: Remove warning after API has been finalized
-
-		    public virtual bool IsPayloadAvailable
-		    {
-		        get { return ((TermPositions) termDocs).IsPayloadAvailable; }
-		    }
-		}
-	}
+        }
+        
+        private class ParallelTermPositions:ParallelTermDocs, TermPositions
+        {
+            private void  InitBlock(ParallelReader enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private ParallelReader enclosingInstance;
+            public new ParallelReader Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+                
+            }
+            
+            public ParallelTermPositions(ParallelReader enclosingInstance):base(enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+            }
+            public ParallelTermPositions(ParallelReader enclosingInstance, Term term):base(enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+                Seek(term);
+            }
+            
+            public override void  Seek(Term term)
+            {
+                IndexReader reader = Enclosing_Instance.fieldToReader[term.Field];
+                termDocs = reader != null?reader.TermPositions(term):null;
+            }
+            
+            public virtual int NextPosition()
+            {
+                // It is an error to call this if there is no next position, e.g. if termDocs==null
+                return ((TermPositions) termDocs).NextPosition();
+            }
+
+            public virtual int PayloadLength
+            {
+                get { return ((TermPositions) termDocs).PayloadLength; }
+            }
+
+            public virtual byte[] GetPayload(byte[] data, int offset)
+            {
+                return ((TermPositions) termDocs).GetPayload(data, offset);
+            }
+            
+            
+            // TODO: Remove warning after API has been finalized
+
+            public virtual bool IsPayloadAvailable
+            {
+                get { return ((TermPositions) termDocs).IsPayloadAvailable; }
+            }
+        }
+    }
 }
\ No newline at end of file
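
The ParallelReader API converted above is easiest to follow with a concrete pairing of indexes. Below is a minimal sketch, not part of this commit, assuming the contemporaneous Lucene.Net 3.x API (IndexReader.Open, FSDirectory.Open) and placeholder directory names; as the class summary warns, both indexes must contain the same documents in the same order.

    // Sketch only: "index-static" and "index-dynamic" are hypothetical paths,
    // and the surrounding API calls are assumed from the Lucene.Net 3.x line.
    using System.IO;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    static class ParallelReaderSketch
    {
        public static IndexReader OpenCombined()
        {
            // Large, rarely re-indexed fields live in one index...
            IndexReader stable = IndexReader.Open(
                FSDirectory.Open(new DirectoryInfo("index-static")), true);
            // ...small, frequently re-indexed fields live in another.
            IndexReader changing = IndexReader.Open(
                FSDirectory.Open(new DirectoryInfo("index-dynamic")), true);

            // closeSubReaders == true: the subreaders are closed with this reader.
            ParallelReader parallel = new ParallelReader(true);
            parallel.Add(stable);
            parallel.Add(changing);   // must have the same maxDoc/numDocs as 'stable'
            return parallel;          // documents expose the union of both field sets
        }
    }
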

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/Payload.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/Payload.cs b/src/core/Index/Payload.cs
index a6f391a..9c00c52 100644
--- a/src/core/Index/Payload.cs
+++ b/src/core/Index/Payload.cs
@@ -22,196 +22,196 @@ using ArrayUtil = Lucene.Net.Util.ArrayUtil;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary>  A Payload is metadata that can be stored together with each occurrence 
-	/// of a term. This metadata is stored inline in the posting list of the
-	/// specific term.  
-	/// <p/>
-	/// To store payloads in the index a <see cref="TokenStream"/> has to be used that
-	/// produces payload data.
-	/// <p/>
+    
+    /// <summary>  A Payload is metadata that can be stored together with each occurrence 
+    /// of a term. This metadata is stored inline in the posting list of the
+    /// specific term.  
+    /// <p/>
+    /// To store payloads in the index a <see cref="TokenStream"/> has to be used that
+    /// produces payload data.
+    /// <p/>
     /// Use <see cref="TermPositions.PayloadLength"/> and <see cref="TermPositions.GetPayload(byte[], int)"/>
-	/// to retrieve the payloads from the index.<br/>
-	/// 
-	/// </summary>
-	[Serializable]
-	public class Payload : System.ICloneable
-	{
-		/// <summary>the byte array containing the payload data </summary>
-		protected internal byte[] data;
-		
-		/// <summary>the offset within the byte array </summary>
-		protected internal int internalOffset;
-		
-		/// <summary>the length of the payload data </summary>
-		protected internal int internalLength;
-		
-		/// <summary>Creates an empty payload and does not allocate a byte array. </summary>
-		public Payload()
-		{
-			// nothing to do
-		}
-		
-		/// <summary> Creates a new payload with the given array as data.
-		/// A reference to the passed-in array is held, i. e. no 
-		/// copy is made.
-		/// 
-		/// </summary>
-		/// <param name="data">the data of this payload
-		/// </param>
-		public Payload(byte[] data):this(data, 0, data.Length)
-		{
-		}
-		
-		/// <summary> Creates a new payload with the given array as data. 
-		/// A reference to the passed-in array is held, i. e. no 
-		/// copy is made.
-		/// 
-		/// </summary>
-		/// <param name="data">the data of this payload
-		/// </param>
-		/// <param name="offset">the offset in the data byte array
-		/// </param>
-		/// <param name="length">the length of the data
-		/// </param>
-		public Payload(byte[] data, int offset, int length)
-		{
-			if (offset < 0 || offset + length > data.Length)
-			{
-				throw new System.ArgumentException();
-			}
-			this.data = data;
-			this.internalOffset = offset;
-			this.internalLength = length;
-		}
+    /// to retrieve the payloads from the index.<br/>
+    /// 
+    /// </summary>
+    [Serializable]
+    public class Payload : System.ICloneable
+    {
+        /// <summary>the byte array containing the payload data </summary>
+        protected internal byte[] data;
+        
+        /// <summary>the offset within the byte array </summary>
+        protected internal int internalOffset;
+        
+        /// <summary>the length of the payload data </summary>
+        protected internal int internalLength;
+        
+        /// <summary>Creates an empty payload and does not allocate a byte array. </summary>
+        public Payload()
+        {
+            // nothing to do
+        }
+        
+        /// <summary> Creates a new payload with the given array as data.
+        /// A reference to the passed-in array is held, i. e. no 
+        /// copy is made.
+        /// 
+        /// </summary>
+        /// <param name="data">the data of this payload
+        /// </param>
+        public Payload(byte[] data):this(data, 0, data.Length)
+        {
+        }
+        
+        /// <summary> Creates a new payload with the given array as data. 
+        /// A reference to the passed-in array is held, i. e. no 
+        /// copy is made.
+        /// 
+        /// </summary>
+        /// <param name="data">the data of this payload
+        /// </param>
+        /// <param name="offset">the offset in the data byte array
+        /// </param>
+        /// <param name="length">the length of the data
+        /// </param>
+        public Payload(byte[] data, int offset, int length)
+        {
+            if (offset < 0 || offset + length > data.Length)
+            {
+                throw new System.ArgumentException();
+            }
+            this.data = data;
+            this.internalOffset = offset;
+            this.internalLength = length;
+        }
 
-	    /// <summary> Sets this payloads data. 
-		/// A reference to the passed-in array is held, i. e. no 
-		/// copy is made.
-		/// </summary>
-		public virtual void  SetData(byte[] value, int offset, int length)
-		{
-			this.data = value;
-			this.internalOffset = offset;
-			this.internalLength = length;
-		}
+        /// <summary> Sets this payloads data. 
+        /// A reference to the passed-in array is held, i. e. no 
+        /// copy is made.
+        /// </summary>
+        public virtual void  SetData(byte[] value, int offset, int length)
+        {
+            this.data = value;
+            this.internalOffset = offset;
+            this.internalLength = length;
+        }
 
-	    /// <summary> Gets or sets a reference to the underlying byte array
-	    /// that holds this payloads data.  Data is not copied.
-	    /// </summary>
-	    public virtual void SetData(byte[] value)
-	    {
-	        SetData(value, 0, value.Length);
-	    }
+        /// <summary> Gets or sets a reference to the underlying byte array
+        /// that holds this payloads data.  Data is not copied.
+        /// </summary>
+        public virtual void SetData(byte[] value)
+        {
+            SetData(value, 0, value.Length);
+        }
 
-	    /// <summary> Gets or sets a reference to the underlying byte array
-	    /// that holds this payloads data.  Data is not copied.
-	    /// </summary>
-	    public virtual byte[] GetData()
-	    {
-	        return this.data;
-	    }
+        /// <summary> Gets or sets a reference to the underlying byte array
+        /// that holds this payloads data.  Data is not copied.
+        /// </summary>
+        public virtual byte[] GetData()
+        {
+            return this.data;
+        }
 
-	    /// <summary> Returns the offset in the underlying byte array </summary>
-	    public virtual int Offset
-	    {
-	        get { return this.internalOffset; }
-	    }
+        /// <summary> Returns the offset in the underlying byte array </summary>
+        public virtual int Offset
+        {
+            get { return this.internalOffset; }
+        }
 
-	    /// <summary> Returns the length of the payload data. </summary>
-	    public virtual int Length
-	    {
-	        get { return this.internalLength; }
-	    }
+        /// <summary> Returns the length of the payload data. </summary>
+        public virtual int Length
+        {
+            get { return this.internalLength; }
+        }
 
-	    /// <summary> Returns the byte at the given index.</summary>
-		public virtual byte ByteAt(int index)
-		{
-			if (0 <= index && index < this.internalLength)
-			{
-				return this.data[this.internalOffset + index];
-			}
-			throw new System. IndexOutOfRangeException("Index of bound " + index);
-		}
-		
-		/// <summary> Allocates a new byte array, copies the payload data into it and returns it. </summary>
-		public virtual byte[] ToByteArray()
-		{
-			byte[] retArray = new byte[this.internalLength];
-			Array.Copy(this.data, this.internalOffset, retArray, 0, this.internalLength);
-			return retArray;
-		}
-		
-		/// <summary> Copies the payload data to a byte array.
-		/// 
-		/// </summary>
-		/// <param name="target">the target byte array
-		/// </param>
-		/// <param name="targetOffset">the offset in the target byte array
-		/// </param>
-		public virtual void  CopyTo(byte[] target, int targetOffset)
-		{
-			if (this.internalLength > target.Length + targetOffset)
-			{
-				throw new System.IndexOutOfRangeException();
-			}
-			Array.Copy(this.data, this.internalOffset, target, targetOffset, this.internalLength);
-		}
-		
-		/// <summary> Clones this payload by creating a copy of the underlying
-		/// byte array.
-		/// </summary>
-		public virtual System.Object Clone()
-		{
-			try
-			{
-				// Start with a shallow copy of data
-				Payload clone = (Payload) base.MemberwiseClone();
-				// Only copy the part of data that belongs to this Payload
-				if (internalOffset == 0 && internalLength == data.Length)
-				{
-					// It is the whole thing, so just clone it.
-					clone.data = new byte[data.Length];
-					data.CopyTo(clone.data, 0);
-				}
-				else
-				{
-					// Just get the part
-					clone.data = this.ToByteArray();
-					clone.internalOffset = 0;
-				}
-				return clone;
-			}
-			catch (System.Exception e)
-			{
-				throw new System.SystemException(e.Message, e); // shouldn't happen
-			}
-		}
-		
-		public  override bool Equals(System.Object obj)
-		{
-			if (obj == this)
-				return true;
-			if (obj is Payload)
-			{
-				Payload other = (Payload) obj;
-				if (internalLength == other.internalLength)
-				{
-					for (int i = 0; i < internalLength; i++)
-						if (data[internalOffset + i] != other.data[other.internalOffset + i])
-							return false;
-					return true;
-				}
-				else
-					return false;
-			}
-			else
-				return false;
-		}
-		
-		public override int GetHashCode()
-		{
-			return ArrayUtil.HashCode(data, internalOffset, internalOffset + internalLength);
-		}
-	}
+        /// <summary> Returns the byte at the given index.</summary>
+        public virtual byte ByteAt(int index)
+        {
+            if (0 <= index && index < this.internalLength)
+            {
+                return this.data[this.internalOffset + index];
+            }
+            throw new System.IndexOutOfRangeException("Index out of bounds: " + index);
+        }
+        
+        /// <summary> Allocates a new byte array, copies the payload data into it and returns it. </summary>
+        public virtual byte[] ToByteArray()
+        {
+            byte[] retArray = new byte[this.internalLength];
+            Array.Copy(this.data, this.internalOffset, retArray, 0, this.internalLength);
+            return retArray;
+        }
+        
+        /// <summary> Copies the payload data to a byte array.
+        /// 
+        /// </summary>
+        /// <param name="target">the target byte array
+        /// </param>
+        /// <param name="targetOffset">the offset in the target byte array
+        /// </param>
+        public virtual void  CopyTo(byte[] target, int targetOffset)
+        {
+            if (targetOffset + this.internalLength > target.Length)
+            {
+                throw new System.IndexOutOfRangeException();
+            }
+            Array.Copy(this.data, this.internalOffset, target, targetOffset, this.internalLength);
+        }
+        
+        /// <summary> Clones this payload by creating a copy of the underlying
+        /// byte array.
+        /// </summary>
+        public virtual System.Object Clone()
+        {
+            try
+            {
+                // Start with a shallow copy of data
+                Payload clone = (Payload) base.MemberwiseClone();
+                // Only copy the part of data that belongs to this Payload
+                if (internalOffset == 0 && internalLength == data.Length)
+                {
+                    // It is the whole thing, so just clone it.
+                    clone.data = new byte[data.Length];
+                    data.CopyTo(clone.data, 0);
+                }
+                else
+                {
+                    // Just get the part
+                    clone.data = this.ToByteArray();
+                    clone.internalOffset = 0;
+                }
+                return clone;
+            }
+            catch (System.Exception e)
+            {
+                throw new System.SystemException(e.Message, e); // shouldn't happen
+            }
+        }
+        
+        public  override bool Equals(System.Object obj)
+        {
+            if (obj == this)
+                return true;
+            if (obj is Payload)
+            {
+                Payload other = (Payload) obj;
+                if (internalLength == other.internalLength)
+                {
+                    for (int i = 0; i < internalLength; i++)
+                        if (data[internalOffset + i] != other.data[other.internalOffset + i])
+                            return false;
+                    return true;
+                }
+                else
+                    return false;
+            }
+            else
+                return false;
+        }
+        
+        public override int GetHashCode()
+        {
+            return ArrayUtil.HashCode(data, internalOffset, internalOffset + internalLength);
+        }
+    }
 }
\ No newline at end of file
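
For orientation, a minimal usage sketch of the Payload API converted above; it assumes only the Lucene.Net.Index namespace from this tree, and the buffer contents are arbitrary:

    using System;
    using Lucene.Net.Index;

    class PayloadSketch
    {
        static void Main()
        {
            // Wrap an existing buffer without copying; offset/length select a slice.
            byte[] buffer = new byte[] { 0x01, 0x02, 0x03, 0x04 };
            Payload payload = new Payload(buffer, 1, 2);    // covers 0x02 and 0x03

            Console.WriteLine(payload.Length);              // 2
            Console.WriteLine(payload.ByteAt(0));           // 2

            // ToByteArray copies only the selected slice into a fresh array.
            byte[] slice = payload.ToByteArray();

            // CopyTo writes the slice into a caller-supplied buffer at an offset.
            byte[] target = new byte[8];
            payload.CopyTo(target, 4);

            // Clone duplicates the underlying bytes, so the clone is independent
            // of the original buffer.
            Payload copy = (Payload) payload.Clone();
            Console.WriteLine(payload.Equals(copy));        // True
        }
    }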

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/PositionBasedTermVectorMapper.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/PositionBasedTermVectorMapper.cs b/src/core/Index/PositionBasedTermVectorMapper.cs
index af548a7..3504262 100644
--- a/src/core/Index/PositionBasedTermVectorMapper.cs
+++ b/src/core/Index/PositionBasedTermVectorMapper.cs
@@ -21,156 +21,156 @@ using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary> For each Field, store position by position information.  It ignores frequency information
-	/// <p/>
-	/// This is not thread-safe.
-	/// </summary>
-	public class PositionBasedTermVectorMapper:TermVectorMapper
-	{
-		private IDictionary<string, IDictionary<int, TVPositionInfo>> fieldToTerms;
-		
-		private System.String currentField;
-		/// <summary> A Map of Integer and TVPositionInfo</summary>
+    
+    /// <summary> For each Field, stores position-by-position information.  Frequency information is ignored.
+    /// <p/>
+    /// This is not thread-safe.
+    /// </summary>
+    public class PositionBasedTermVectorMapper:TermVectorMapper
+    {
+        private IDictionary<string, IDictionary<int, TVPositionInfo>> fieldToTerms;
+        
+        private System.String currentField;
+        /// <summary> A map from position (Integer) to TVPositionInfo</summary>
         private IDictionary<int, TVPositionInfo> currentPositions;
-		private bool storeOffsets;
-		
-		public PositionBasedTermVectorMapper():base(false, false)
-		{
-		}
-		
-		public PositionBasedTermVectorMapper(bool ignoringOffsets):base(false, ignoringOffsets)
-		{
-		}
+        private bool storeOffsets;
+        
+        public PositionBasedTermVectorMapper():base(false, false)
+        {
+        }
+        
+        public PositionBasedTermVectorMapper(bool ignoringOffsets):base(false, ignoringOffsets)
+        {
+        }
 
-	    /// <summary> Never ignores positions.  This mapper doesn't make much sense unless there are positions</summary>
-	    /// <value> false </value>
-	    public override bool IsIgnoringPositions
-	    {
-	        get { return false; }
-	    }
+        /// <summary> Never ignores positions.  This mapper doesn't make much sense unless there are positions</summary>
+        /// <value> false </value>
+        public override bool IsIgnoringPositions
+        {
+            get { return false; }
+        }
 
-	    /// <summary> Callback for the TermVectorReader. </summary>
-		/// <param name="term">
-		/// </param>
-		/// <param name="frequency">
-		/// </param>
-		/// <param name="offsets">
-		/// </param>
-		/// <param name="positions">
-		/// </param>
-		public override void  Map(System.String term, int frequency, TermVectorOffsetInfo[] offsets, int[] positions)
-		{
-			for (int i = 0; i < positions.Length; i++)
-			{
-				System.Int32 posVal =  positions[i];
-				TVPositionInfo pos = currentPositions[posVal];
-				if (pos == null)
-				{
-					pos = new TVPositionInfo(positions[i], storeOffsets);
-					currentPositions[posVal] = pos;
-				}
-				pos.addTerm(term, offsets != null ? offsets[i] : TermVectorOffsetInfo.Null);
-			}
-		}
-		
-		/// <summary> Callback mechanism used by the TermVectorReader</summary>
-		/// <param name="field"> The field being read
-		/// </param>
-		/// <param name="numTerms">The number of terms in the vector
-		/// </param>
-		/// <param name="storeOffsets">Whether offsets are available
-		/// </param>
-		/// <param name="storePositions">Whether positions are available
-		/// </param>
-		public override void  SetExpectations(System.String field, int numTerms, bool storeOffsets, bool storePositions)
-		{
-			if (storePositions == false)
-			{
-				throw new System.SystemException("You must store positions in order to use this Mapper");
-			}
-			if (storeOffsets == true)
-			{
-				//ignoring offsets
-			}
-			fieldToTerms = new HashMap<string, IDictionary<int, TVPositionInfo>>(numTerms);
-			this.storeOffsets = storeOffsets;
-			currentField = field;
-			currentPositions = new HashMap<int, TVPositionInfo>();
-			fieldToTerms[currentField] = currentPositions;
-		}
+        /// <summary> Callback for the TermVectorReader. </summary>
+        /// <param name="term">
+        /// </param>
+        /// <param name="frequency">
+        /// </param>
+        /// <param name="offsets">
+        /// </param>
+        /// <param name="positions">
+        /// </param>
+        public override void  Map(System.String term, int frequency, TermVectorOffsetInfo[] offsets, int[] positions)
+        {
+            for (int i = 0; i < positions.Length; i++)
+            {
+                System.Int32 posVal =  positions[i];
+                TVPositionInfo pos = currentPositions[posVal];
+                if (pos == null)
+                {
+                    pos = new TVPositionInfo(positions[i], storeOffsets);
+                    currentPositions[posVal] = pos;
+                }
+                pos.addTerm(term, offsets != null ? offsets[i] : TermVectorOffsetInfo.Null);
+            }
+        }
+        
+        /// <summary> Callback mechanism used by the TermVectorReader</summary>
+        /// <param name="field"> The field being read
+        /// </param>
+        /// <param name="numTerms">The number of terms in the vector
+        /// </param>
+        /// <param name="storeOffsets">Whether offsets are available
+        /// </param>
+        /// <param name="storePositions">Whether positions are available
+        /// </param>
+        public override void  SetExpectations(System.String field, int numTerms, bool storeOffsets, bool storePositions)
+        {
+            if (storePositions == false)
+            {
+                throw new System.SystemException("You must store positions in order to use this Mapper");
+            }
+            if (storeOffsets == true)
+            {
+                //ignoring offsets
+            }
+            fieldToTerms = new HashMap<string, IDictionary<int, TVPositionInfo>>(numTerms);
+            this.storeOffsets = storeOffsets;
+            currentField = field;
+            currentPositions = new HashMap<int, TVPositionInfo>();
+            fieldToTerms[currentField] = currentPositions;
+        }
 
-	    /// <summary> Get the mapping between fields and terms, sorted by the comparator
-	    /// 
-	    /// </summary>
-	    /// <value> A map between field names and a Map. The sub-Map key is the position as the integer, the value is &lt;see cref=&quot;Lucene.Net.Index.PositionBasedTermVectorMapper.TVPositionInfo&quot; /&gt;. </value>
-	    public virtual IDictionary<string, IDictionary<int, TVPositionInfo>> FieldToTerms
-	    {
-	        get { return fieldToTerms; }
-	    }
+        /// <summary> Gets the mapping between fields and terms.
+        /// 
+        /// </summary>
+        /// <value> A map from field name to a sub-map; the sub-map's key is the position (as an integer) and its value is <see cref="Lucene.Net.Index.PositionBasedTermVectorMapper.TVPositionInfo" />. </value>
+        public virtual IDictionary<string, IDictionary<int, TVPositionInfo>> FieldToTerms
+        {
+            get { return fieldToTerms; }
+        }
 
-	    /// <summary> Container for a term at a position</summary>
-		public class TVPositionInfo
-		{
-			/// <summary> </summary>
-			/// <returns> The position of the term
-			/// </returns>
-			virtual public int Position
-			{
-				get
-				{
-					return position;
-				}
-				
-			}
-			/// <summary> Note, there may be multiple terms at the same position</summary>
-			/// <returns> A List of Strings
-			/// </returns>
-			virtual public IList<String> Terms
-			{
-				get
-				{
-					return terms;
-				}
-				
-			}
-			/// <summary> Parallel list (to <see cref="Terms" />) of TermVectorOffsetInfo objects.  
-			/// There may be multiple entries since there may be multiple terms at a position</summary>
-			/// <returns> A List of TermVectorOffsetInfo objects, if offsets are store.
-			/// </returns>
-			virtual public IList<TermVectorOffsetInfo> Offsets
-			{
-				get
-				{
-					return offsets;
-				}
-				
-			}
-			private int position;
-			//a list of Strings
-			private IList<string> terms;
-			//A list of TermVectorOffsetInfo
-			private IList<TermVectorOffsetInfo> offsets;
-			
-			
-			public TVPositionInfo(int position, bool storeOffsets)
-			{
-				this.position = position;
-				terms = new List<string>();
-				if (storeOffsets)
-				{
-					offsets = new List<TermVectorOffsetInfo>();
-				}
-			}
-			
-			internal virtual void  addTerm(System.String term, TermVectorOffsetInfo info)
-			{
-				terms.Add(term);
-				if (offsets != null)
-				{
-					offsets.Add(info);
-				}
-			}
-		}
-	}
+        /// <summary> Container for a term at a position</summary>
+        public class TVPositionInfo
+        {
+            /// <summary> </summary>
+            /// <returns> The position of the term
+            /// </returns>
+            virtual public int Position
+            {
+                get
+                {
+                    return position;
+                }
+                
+            }
+            /// <summary> Note, there may be multiple terms at the same position</summary>
+            /// <returns> A List of Strings
+            /// </returns>
+            virtual public IList<String> Terms
+            {
+                get
+                {
+                    return terms;
+                }
+                
+            }
+            /// <summary> Parallel list (to <see cref="Terms" />) of TermVectorOffsetInfo objects.  
+            /// There may be multiple entries since there may be multiple terms at a position</summary>
+            /// <returns> A List of TermVectorOffsetInfo objects, if offsets are stored.
+            /// </returns>
+            virtual public IList<TermVectorOffsetInfo> Offsets
+            {
+                get
+                {
+                    return offsets;
+                }
+                
+            }
+            private int position;
+            //a list of Strings
+            private IList<string> terms;
+            //A list of TermVectorOffsetInfo
+            private IList<TermVectorOffsetInfo> offsets;
+            
+            
+            public TVPositionInfo(int position, bool storeOffsets)
+            {
+                this.position = position;
+                terms = new List<string>();
+                if (storeOffsets)
+                {
+                    offsets = new List<TermVectorOffsetInfo>();
+                }
+            }
+            
+            internal virtual void  addTerm(System.String term, TermVectorOffsetInfo info)
+            {
+                terms.Add(term);
+                if (offsets != null)
+                {
+                    offsets.Add(info);
+                }
+            }
+        }
+    }
 }
\ No newline at end of file
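
As a rough sketch of how this mapper is driven (assuming an index whose documents were added with term vectors that store positions; the index path and the field name "body" are hypothetical):

    using System;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    class MapperSketch
    {
        static void Main()
        {
            Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("index"));
            IndexReader reader = IndexReader.Open(dir, true);   // read-only

            // The reader pushes the term vector of doc 0 / field "body" through the mapper.
            PositionBasedTermVectorMapper mapper = new PositionBasedTermVectorMapper();
            reader.GetTermFreqVector(0, "body", mapper);

            // FieldToTerms: field name -> (position -> terms, plus offsets if stored).
            foreach (var field in mapper.FieldToTerms)
            {
                foreach (var entry in field.Value)
                {
                    PositionBasedTermVectorMapper.TVPositionInfo info = entry.Value;
                    Console.WriteLine("{0} @ {1}: {2} term(s)",
                        field.Key, info.Position, info.Terms.Count);
                }
            }

            // 3.0.x-style Dispose; older branches use Close().
            reader.Dispose();
            dir.Dispose();
        }
    }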

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/RawPostingList.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/RawPostingList.cs b/src/core/Index/RawPostingList.cs
index bffc2de..c3646b2 100644
--- a/src/core/Index/RawPostingList.cs
+++ b/src/core/Index/RawPostingList.cs
@@ -19,28 +19,28 @@ using System;
 
 namespace Lucene.Net.Index
 {
-	
-	
-	/// <summary>This is the base class for an in-memory posting list,
-	/// keyed by a Token.  <see cref="TermsHash" /> maintains a hash
-	/// table holding one instance of this per unique Token.
-	/// Consumers of TermsHash (<see cref="TermsHashConsumer" />) must
-	/// subclass this class with its own concrete class.
-	/// FreqProxTermsWriter.PostingList is a private inner class used 
-	/// for the freq/prox postings, and 
-	/// TermVectorsTermsWriter.PostingList is a private inner class
-	/// used to hold TermVectors postings. 
-	/// </summary>
-	
-	abstract class RawPostingList
-	{
-		internal static readonly int BYTES_SIZE;
-		internal int textStart;
-		internal int intStart;
-		internal int byteStart;
-		static RawPostingList()
-		{
-			BYTES_SIZE = DocumentsWriter.OBJECT_HEADER_BYTES + 3 * DocumentsWriter.INT_NUM_BYTE;
-		}
-	}
+    
+    
+    /// <summary>This is the base class for an in-memory posting list,
+    /// keyed by a Token.  <see cref="TermsHash" /> maintains a hash
+    /// table holding one instance of this per unique Token.
+    /// Consumers of TermsHash (<see cref="TermsHashConsumer" />) must
+    /// subclass this class with its own concrete class.
+    /// FreqProxTermsWriter.PostingList is a private inner class used 
+    /// for the freq/prox postings, and 
+    /// TermVectorsTermsWriter.PostingList is a private inner class
+    /// used to hold TermVectors postings. 
+    /// </summary>
+    
+    abstract class RawPostingList
+    {
+        internal static readonly int BYTES_SIZE;
+        internal int textStart;
+        internal int intStart;
+        internal int byteStart;
+        static RawPostingList()
+        {
+            BYTES_SIZE = DocumentsWriter.OBJECT_HEADER_BYTES + 3 * DocumentsWriter.INT_NUM_BYTE;
+        }
+    }
 }
\ No newline at end of file
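
One small note on the static initializer above: BYTES_SIZE only feeds DocumentsWriter's RAM accounting. Assuming the usual constants in this tree (OBJECT_HEADER_BYTES = 8 and INT_NUM_BYTE = 4; worth double-checking in DocumentsWriter.cs), it works out to 8 + 3 * 4 = 20 bytes of fixed overhead per posting, before the text/int/byte pools it points into.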

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/ReadOnlyDirectoryReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/ReadOnlyDirectoryReader.cs b/src/core/Index/ReadOnlyDirectoryReader.cs
index 8f0f3b7..be168d6 100644
--- a/src/core/Index/ReadOnlyDirectoryReader.cs
+++ b/src/core/Index/ReadOnlyDirectoryReader.cs
@@ -21,25 +21,25 @@ using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Index
 {
-	
-	public class ReadOnlyDirectoryReader:DirectoryReader
-	{
-		internal ReadOnlyDirectoryReader(Directory directory, SegmentInfos sis, IndexDeletionPolicy deletionPolicy, int termInfosIndexDivisor):base(directory, sis, deletionPolicy, true, termInfosIndexDivisor)
-		{
-		}
+    
+    public class ReadOnlyDirectoryReader:DirectoryReader
+    {
+        internal ReadOnlyDirectoryReader(Directory directory, SegmentInfos sis, IndexDeletionPolicy deletionPolicy, int termInfosIndexDivisor):base(directory, sis, deletionPolicy, true, termInfosIndexDivisor)
+        {
+        }
 
         internal ReadOnlyDirectoryReader(Directory directory, SegmentInfos infos, SegmentReader[] oldReaders, int[] oldStarts, System.Collections.Generic.IDictionary<string, byte[]> oldNormsCache, bool doClone, int termInfosIndexDivisor)
             : base(directory, infos, oldReaders, oldStarts, oldNormsCache, true, doClone, termInfosIndexDivisor)
         {
         }
 
-	    internal ReadOnlyDirectoryReader(IndexWriter writer, SegmentInfos infos, int termInfosIndexDivisor):base(writer, infos, termInfosIndexDivisor)
-		{
-		}
-		
-		protected internal override void  AcquireWriteLock()
-		{
-			ReadOnlySegmentReader.NoWrite();
-		}
-	}
+        internal ReadOnlyDirectoryReader(IndexWriter writer, SegmentInfos infos, int termInfosIndexDivisor):base(writer, infos, termInfosIndexDivisor)
+        {
+        }
+        
+        protected internal override void  AcquireWriteLock()
+        {
+            ReadOnlySegmentReader.NoWrite();
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/ReadOnlySegmentReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/ReadOnlySegmentReader.cs b/src/core/Index/ReadOnlySegmentReader.cs
index 3c7c916..bd204c8 100644
--- a/src/core/Index/ReadOnlySegmentReader.cs
+++ b/src/core/Index/ReadOnlySegmentReader.cs
@@ -19,24 +19,24 @@ using System;
 
 namespace Lucene.Net.Index
 {
-	
-	public class ReadOnlySegmentReader:SegmentReader
-	{
-		
-		internal static void  NoWrite()
-		{
-			throw new System.NotSupportedException("This IndexReader cannot make any changes to the index (it was opened with readOnly = true)");
-		}
-		
-		protected internal override void  AcquireWriteLock()
-		{
-			NoWrite();
-		}
-		
-		// Not synchronized
-		public override bool IsDeleted(int n)
-		{
-			return deletedDocs != null && deletedDocs.Get(n);
-		}
-	}
+    
+    public class ReadOnlySegmentReader:SegmentReader
+    {
+        
+        internal static void  NoWrite()
+        {
+            throw new System.NotSupportedException("This IndexReader cannot make any changes to the index (it was opened with readOnly = true)");
+        }
+        
+        protected internal override void  AcquireWriteLock()
+        {
+            NoWrite();
+        }
+        
+        // Not synchronized
+        public override bool IsDeleted(int n)
+        {
+            return deletedDocs != null && deletedDocs.Get(n);
+        }
+    }
 }
\ No newline at end of file
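
The two read-only readers above share one enforcement path: any operation that needs the write lock ends up in ReadOnlySegmentReader.NoWrite(). A minimal sketch, assuming a hypothetical "index" directory opened with readOnly = true:

    using System;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    class ReadOnlySketch
    {
        static void Main()
        {
            Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("index"));

            // readOnly = true hands back a ReadOnlyDirectoryReader over ReadOnlySegmentReaders.
            IndexReader reader = IndexReader.Open(dir, true);

            try
            {
                // DeleteDocument must acquire the write lock first, so the
                // read-only readers refuse it via NoWrite().
                reader.DeleteDocument(0);
            }
            catch (NotSupportedException e)
            {
                // "This IndexReader cannot make any changes to the index ..."
                Console.WriteLine(e.Message);
            }

            reader.Dispose();
            dir.Dispose();
        }
    }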

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/ReusableStringReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/ReusableStringReader.cs b/src/core/Index/ReusableStringReader.cs
index 54c1b7d..5a3c86e 100644
--- a/src/core/Index/ReusableStringReader.cs
+++ b/src/core/Index/ReusableStringReader.cs
@@ -20,11 +20,11 @@ using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary>Used by DocumentsWriter to implemented a StringReader
-	/// that can be reset to a new string; we use this when
-	/// tokenizing the string value from a Field. 
-	/// </summary>
+    
+    /// <summary>Used by DocumentsWriter to implement a StringReader
+    /// that can be reset to a new string; we use this when
+    /// tokenizing the string value from a Field. 
+    /// </summary>
     sealed class ReusableStringReader : System.IO.TextReader
     {
         internal int upto;