Posted to commits@lucenenet.apache.org by cc...@apache.org on 2013/04/03 19:40:01 UTC

[18/51] [partial] Mass convert mixed tabs to spaces

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/CompoundFileReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/CompoundFileReader.cs b/src/core/Index/CompoundFileReader.cs
index 74f4fb4..3b7f4f3 100644
--- a/src/core/Index/CompoundFileReader.cs
+++ b/src/core/Index/CompoundFileReader.cs
@@ -25,101 +25,101 @@ using Lock = Lucene.Net.Store.Lock;
 
 namespace Lucene.Net.Index
 {
-	
-	
-	/// <summary> Class for accessing a compound stream.
-	/// This class implements a directory, but is limited to only read operations.
-	/// Directory methods that would normally modify data throw an exception.
-	/// </summary>
-	public class CompoundFileReader : Directory
-	{
-		
-		private readonly int readBufferSize;
-		
-		private sealed class FileEntry
-		{
-			internal long offset;
-			internal long length;
-		}
+    
+    
+    /// <summary> Class for accessing a compound stream.
+    /// This class implements a directory, but is limited to only read operations.
+    /// Directory methods that would normally modify data throw an exception.
+    /// </summary>
+    public class CompoundFileReader : Directory
+    {
+        
+        private readonly int readBufferSize;
+        
+        private sealed class FileEntry
+        {
+            internal long offset;
+            internal long length;
+        }
 
-	    private bool isDisposed;
-		
-		// Base info
-		private readonly Directory directory;
-		private readonly System.String fileName;
-		
-		private IndexInput stream;
-		private HashMap<string, FileEntry> entries = new HashMap<string, FileEntry>();
-		
-		
-		public CompoundFileReader(Directory dir, System.String name):this(dir, name, BufferedIndexInput.BUFFER_SIZE)
-		{
-		}
-		
-		public CompoundFileReader(Directory dir, System.String name, int readBufferSize)
-		{
-			directory = dir;
-			fileName = name;
-			this.readBufferSize = readBufferSize;
-			
-			bool success = false;
-			
-			try
-			{
-				stream = dir.OpenInput(name, readBufferSize);
-				
-				// read the directory and init files
-				int count = stream.ReadVInt();
-				FileEntry entry = null;
-				for (int i = 0; i < count; i++)
-				{
-					long offset = stream.ReadLong();
-					System.String id = stream.ReadString();
-					
-					if (entry != null)
-					{
-						// set length of the previous entry
-						entry.length = offset - entry.offset;
-					}
+        private bool isDisposed;
+        
+        // Base info
+        private readonly Directory directory;
+        private readonly System.String fileName;
+        
+        private IndexInput stream;
+        private HashMap<string, FileEntry> entries = new HashMap<string, FileEntry>();
+        
+        
+        public CompoundFileReader(Directory dir, System.String name):this(dir, name, BufferedIndexInput.BUFFER_SIZE)
+        {
+        }
+        
+        public CompoundFileReader(Directory dir, System.String name, int readBufferSize)
+        {
+            directory = dir;
+            fileName = name;
+            this.readBufferSize = readBufferSize;
+            
+            bool success = false;
+            
+            try
+            {
+                stream = dir.OpenInput(name, readBufferSize);
+                
+                // read the directory and init files
+                int count = stream.ReadVInt();
+                FileEntry entry = null;
+                for (int i = 0; i < count; i++)
+                {
+                    long offset = stream.ReadLong();
+                    System.String id = stream.ReadString();
+                    
+                    if (entry != null)
+                    {
+                        // set length of the previous entry
+                        entry.length = offset - entry.offset;
+                    }
 
-					entry = new FileEntry {offset = offset};
-					entries[id] = entry;
-				}
-				
-				// set the length of the final entry
-				if (entry != null)
-				{
-					entry.length = stream.Length() - entry.offset;
-				}
-				
-				success = true;
-			}
-			finally
-			{
-				if (!success && (stream != null))
-				{
-					try
-					{
-						stream.Close();
-					}
-					catch (System.IO.IOException)
-					{
-					}
-				}
-			}
-		}
+                    entry = new FileEntry {offset = offset};
+                    entries[id] = entry;
+                }
+                
+                // set the length of the final entry
+                if (entry != null)
+                {
+                    entry.length = stream.Length() - entry.offset;
+                }
+                
+                success = true;
+            }
+            finally
+            {
+                if (!success && (stream != null))
+                {
+                    try
+                    {
+                        stream.Close();
+                    }
+                    catch (System.IO.IOException)
+                    {
+                    }
+                }
+            }
+        }
 
-	    public virtual Directory Directory
-	    {
-	        get { return directory; }
-	    }
+        public virtual Directory Directory
+        {
+            get { return directory; }
+        }
 
-	    public virtual string Name
-	    {
-	        get { return fileName; }
-	    }
+        public virtual string Name
+        {
+            get { return fileName; }
+        }
 
-	    protected override void Dispose(bool disposing)
+        protected override void Dispose(bool disposing)
         {
             lock (this)
             {
@@ -141,152 +141,152 @@ namespace Lucene.Net.Index
                 isDisposed = true;
             }
         }
-		
-		public override IndexInput OpenInput(System.String id)
-		{
-			lock (this)
-			{
-				// Default to readBufferSize passed in when we were opened
-				return OpenInput(id, readBufferSize);
-			}
-		}
-		
-		public override IndexInput OpenInput(System.String id, int readBufferSize)
-		{
-			lock (this)
-			{
-				if (stream == null)
-					throw new System.IO.IOException("Stream closed");
-				
-				FileEntry entry = entries[id];
-				if (entry == null)
-					throw new System.IO.IOException("No sub-file with id " + id + " found");
-				
-				return new CSIndexInput(stream, entry.offset, entry.length, readBufferSize);
-			}
-		}
-		
-		/// <summary>Returns an array of strings, one for each file in the directory. </summary>
-		public override System.String[] ListAll()
-		{
-		    return entries.Keys.ToArray();
-		}
-		
-		/// <summary>Returns true iff a file with the given name exists. </summary>
-		public override bool FileExists(System.String name)
-		{
-			return entries.ContainsKey(name);
-		}
-		
-		/// <summary>Returns the time the compound file was last modified. </summary>
-		public override long FileModified(System.String name)
-		{
-			return directory.FileModified(fileName);
-		}
-		
-		/// <summary>Set the modified time of the compound file to now. </summary>
-		public override void  TouchFile(System.String name)
-		{
-			directory.TouchFile(fileName);
-		}
-		
-		/// <summary>Not implemented</summary>
-		/// <throws>  UnsupportedOperationException  </throws>
-		public override void  DeleteFile(System.String name)
-		{
-			throw new System.NotSupportedException();
-		}
-		
-		/// <summary>Not implemented</summary>
-		/// <throws>  UnsupportedOperationException  </throws>
-		public void RenameFile(System.String from, System.String to)
-		{
-			throw new System.NotSupportedException();
-		}
-		
-		/// <summary>Returns the length of a file in the directory.</summary>
-		/// <throws>  IOException if the file does not exist  </throws>
-		public override long FileLength(System.String name)
-		{
-			FileEntry e = entries[name];
-			if (e == null)
-				throw new System.IO.IOException("File " + name + " does not exist");
-			return e.length;
-		}
-		
-		/// <summary>Not implemented</summary>
-		/// <throws>  UnsupportedOperationException  </throws>
-		public override IndexOutput CreateOutput(System.String name)
-		{
-			throw new System.NotSupportedException();
-		}
-		
-		/// <summary>Not implemented</summary>
-		/// <throws>  UnsupportedOperationException  </throws>
-		public override Lock MakeLock(System.String name)
-		{
-			throw new System.NotSupportedException();
-		}
-		
-		/// <summary>Implementation of an IndexInput that reads from a portion of the
-		/// compound file. The visibility is left as "package" *only* because
-		/// this helps with testing since JUnit test cases in a different class
-		/// can then access package fields of this class.
-		/// </summary>
-		public /*internal*/ sealed class CSIndexInput : BufferedIndexInput
-		{
-			internal IndexInput base_Renamed;
-			internal long fileOffset;
-			internal long length;
+        
+        public override IndexInput OpenInput(System.String id)
+        {
+            lock (this)
+            {
+                // Default to readBufferSize passed in when we were opened
+                return OpenInput(id, readBufferSize);
+            }
+        }
+        
+        public override IndexInput OpenInput(System.String id, int readBufferSize)
+        {
+            lock (this)
+            {
+                if (stream == null)
+                    throw new System.IO.IOException("Stream closed");
+                
+                FileEntry entry = entries[id];
+                if (entry == null)
+                    throw new System.IO.IOException("No sub-file with id " + id + " found");
+                
+                return new CSIndexInput(stream, entry.offset, entry.length, readBufferSize);
+            }
+        }
+        
+        /// <summary>Returns an array of strings, one for each file in the directory. </summary>
+        public override System.String[] ListAll()
+        {
+            return entries.Keys.ToArray();
+        }
+        
+        /// <summary>Returns true iff a file with the given name exists. </summary>
+        public override bool FileExists(System.String name)
+        {
+            return entries.ContainsKey(name);
+        }
+        
+        /// <summary>Returns the time the compound file was last modified. </summary>
+        public override long FileModified(System.String name)
+        {
+            return directory.FileModified(fileName);
+        }
+        
+        /// <summary>Set the modified time of the compound file to now. </summary>
+        public override void  TouchFile(System.String name)
+        {
+            directory.TouchFile(fileName);
+        }
+        
+        /// <summary>Not implemented</summary>
+        /// <throws>  UnsupportedOperationException  </throws>
+        public override void  DeleteFile(System.String name)
+        {
+            throw new System.NotSupportedException();
+        }
+        
+        /// <summary>Not implemented</summary>
+        /// <throws>  UnsupportedOperationException  </throws>
+        public void RenameFile(System.String from, System.String to)
+        {
+            throw new System.NotSupportedException();
+        }
+        
+        /// <summary>Returns the length of a file in the directory.</summary>
+        /// <throws>  IOException if the file does not exist  </throws>
+        public override long FileLength(System.String name)
+        {
+            FileEntry e = entries[name];
+            if (e == null)
+                throw new System.IO.IOException("File " + name + " does not exist");
+            return e.length;
+        }
+        
+        /// <summary>Not implemented</summary>
+        /// <throws>  UnsupportedOperationException  </throws>
+        public override IndexOutput CreateOutput(System.String name)
+        {
+            throw new System.NotSupportedException();
+        }
+        
+        /// <summary>Not implemented</summary>
+        /// <throws>  UnsupportedOperationException  </throws>
+        public override Lock MakeLock(System.String name)
+        {
+            throw new System.NotSupportedException();
+        }
+        
+        /// <summary>Implementation of an IndexInput that reads from a portion of the
+        /// compound file. The visibility is left as "package" *only* because
+        /// this helps with testing since JUnit test cases in a different class
+        /// can then access package fields of this class.
+        /// </summary>
+        public /*internal*/ sealed class CSIndexInput : BufferedIndexInput
+        {
+            internal IndexInput base_Renamed;
+            internal long fileOffset;
+            internal long length;
 
-		    private bool isDisposed;
-			
-			internal CSIndexInput(IndexInput @base, long fileOffset, long length):this(@base, fileOffset, length, BufferedIndexInput.BUFFER_SIZE)
-			{
-			}
-			
-			internal CSIndexInput(IndexInput @base, long fileOffset, long length, int readBufferSize):base(readBufferSize)
-			{
-				this.base_Renamed = (IndexInput) @base.Clone();
-				this.fileOffset = fileOffset;
-				this.length = length;
-			}
-			
-			public override System.Object Clone()
-			{
-				var clone = (CSIndexInput) base.Clone();
-				clone.base_Renamed = (IndexInput) base_Renamed.Clone();
-				clone.fileOffset = fileOffset;
-				clone.length = length;
-				return clone;
-			}
-			
-			/// <summary>Expert: implements buffer refill.  Reads bytes from the current
-			/// position in the input.
-			/// </summary>
-			/// <param name="b">the array to read bytes into
-			/// </param>
-			/// <param name="offset">the offset in the array to start storing bytes
-			/// </param>
-			/// <param name="len">the number of bytes to read
-			/// </param>
-			public override void  ReadInternal(byte[] b, int offset, int len)
-			{
-				long start = FilePointer;
-				if (start + len > length)
-					throw new System.IO.IOException("read past EOF");
-				base_Renamed.Seek(fileOffset + start);
-				base_Renamed.ReadBytes(b, offset, len, false);
-			}
-			
-			/// <summary>Expert: implements seek.  Sets current position in this file, where
-			/// the next <see cref="ReadInternal(byte[],int,int)" /> will occur.
-			/// </summary>
-			/// <seealso cref="ReadInternal(byte[],int,int)">
-			/// </seealso>
-			public override void  SeekInternal(long pos)
-			{
-			}
+            private bool isDisposed;
+            
+            internal CSIndexInput(IndexInput @base, long fileOffset, long length):this(@base, fileOffset, length, BufferedIndexInput.BUFFER_SIZE)
+            {
+            }
+            
+            internal CSIndexInput(IndexInput @base, long fileOffset, long length, int readBufferSize):base(readBufferSize)
+            {
+                this.base_Renamed = (IndexInput) @base.Clone();
+                this.fileOffset = fileOffset;
+                this.length = length;
+            }
+            
+            public override System.Object Clone()
+            {
+                var clone = (CSIndexInput) base.Clone();
+                clone.base_Renamed = (IndexInput) base_Renamed.Clone();
+                clone.fileOffset = fileOffset;
+                clone.length = length;
+                return clone;
+            }
+            
+            /// <summary>Expert: implements buffer refill.  Reads bytes from the current
+            /// position in the input.
+            /// </summary>
+            /// <param name="b">the array to read bytes into
+            /// </param>
+            /// <param name="offset">the offset in the array to start storing bytes
+            /// </param>
+            /// <param name="len">the number of bytes to read
+            /// </param>
+            public override void  ReadInternal(byte[] b, int offset, int len)
+            {
+                long start = FilePointer;
+                if (start + len > length)
+                    throw new System.IO.IOException("read past EOF");
+                base_Renamed.Seek(fileOffset + start);
+                base_Renamed.ReadBytes(b, offset, len, false);
+            }
+            
+            /// <summary>Expert: implements seek.  Sets current position in this file, where
+            /// the next <see cref="ReadInternal(byte[],int,int)" /> will occur.
+            /// </summary>
+            /// <seealso cref="ReadInternal(byte[],int,int)">
+            /// </seealso>
+            public override void  SeekInternal(long pos)
+            {
+            }
 
             protected override void Dispose(bool disposing)
             {
@@ -302,16 +302,16 @@ namespace Lucene.Net.Index
                 
                 isDisposed = true;
             }
-			
-			public override long Length()
-			{
-				return length;
-			}
+            
+            public override long Length()
+            {
+                return length;
+            }
 
             public IndexInput base_Renamed_ForNUnit
             {
                 get { return base_Renamed; }
             }
-		}
-	}
+        }
+    }
 }
\ No newline at end of file
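
For context, here is a minimal usage sketch (not part of the patch) of the CompoundFileReader API shown in the diff above, assuming the Lucene.Net 3.x store API. The index path "index", the compound file name "_0.cfs", and the sub-file id "_0.fnm" are hypothetical examples; in practice this reader is driven internally by SegmentReader.

    using System;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    class CompoundFileReaderExample
    {
        static void Main()
        {
            // Open the directory that holds the compound file (path is hypothetical).
            Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("index"));
            using (var csr = new CompoundFileReader(dir, "_0.cfs"))
            {
                // List the sub-files packed into the compound file and their lengths.
                foreach (string id in csr.ListAll())
                    Console.WriteLine(id + " : " + csr.FileLength(id) + " bytes");

                // OpenInput returns a CSIndexInput restricted to that sub-file's slice.
                IndexInput input = csr.OpenInput("_0.fnm");
                try
                {
                    var buffer = new byte[(int) input.Length()];
                    input.ReadBytes(buffer, 0, buffer.Length);
                }
                finally
                {
                    input.Close();
                }
            }
        }
    }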

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/CompoundFileWriter.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/CompoundFileWriter.cs b/src/core/Index/CompoundFileWriter.cs
index e2905e1..ff25487 100644
--- a/src/core/Index/CompoundFileWriter.cs
+++ b/src/core/Index/CompoundFileWriter.cs
@@ -23,118 +23,118 @@ using IndexOutput = Lucene.Net.Store.IndexOutput;
 
 namespace Lucene.Net.Index
 {
-	
-	
-	/// <summary> Combines multiple files into a single compound file.
-	/// The file format:<br/>
-	/// <list type="bullet">
-	/// <item>VInt fileCount</item>
-	/// <item>{Directory}
-	/// fileCount entries with the following structure:</item>
-	/// <list type="bullet">
-	/// <item>long dataOffset</item>
-	/// <item>String fileName</item>
-	/// </list>
-	/// <item>{File Data}
-	/// fileCount entries with the raw data of the corresponding file</item>
-	/// </list>
-	/// 
-	/// The fileCount integer indicates how many files are contained in this compound
-	/// file. The {directory} that follows has that many entries. Each directory entry
-	/// contains a long pointer to the start of this file's data section, and a String
-	/// with that file's name.
-	/// </summary>
-	public sealed class CompoundFileWriter : IDisposable
-	{
-		
-		private sealed class FileEntry
-		{
-			/// <summary>source file </summary>
-			internal System.String file;
-			
-			/// <summary>temporary holder for the start of directory entry for this file </summary>
-			internal long directoryOffset;
-			
-			/// <summary>temporary holder for the start of this file's data section </summary>
-			internal long dataOffset;
-		}
-		
-		
-		private readonly Directory directory;
-		private readonly String fileName;
+    
+    
+    /// <summary> Combines multiple files into a single compound file.
+    /// The file format:<br/>
+    /// <list type="bullet">
+    /// <item>VInt fileCount</item>
+    /// <item>{Directory}
+    /// fileCount entries with the following structure:</item>
+    /// <list type="bullet">
+    /// <item>long dataOffset</item>
+    /// <item>String fileName</item>
+    /// </list>
+    /// <item>{File Data}
+    /// fileCount entries with the raw data of the corresponding file</item>
+    /// </list>
+    /// 
+    /// The fileCount integer indicates how many files are contained in this compound
+    /// file. The {directory} that follows has that many entries. Each directory entry
+    /// contains a long pointer to the start of this file's data section, and a String
+    /// with that file's name.
+    /// </summary>
+    public sealed class CompoundFileWriter : IDisposable
+    {
+        
+        private sealed class FileEntry
+        {
+            /// <summary>source file </summary>
+            internal System.String file;
+            
+            /// <summary>temporary holder for the start of directory entry for this file </summary>
+            internal long directoryOffset;
+            
+            /// <summary>temporary holder for the start of this file's data section </summary>
+            internal long dataOffset;
+        }
+        
+        
+        private readonly Directory directory;
+        private readonly String fileName;
         private readonly HashSet<string> ids;
-		private readonly LinkedList<FileEntry> entries;
-		private bool merged = false;
-		private readonly SegmentMerger.CheckAbort checkAbort;
-		
-		/// <summary>Create the compound stream in the specified file. The file name is the
-		/// entire name (no extensions are added).
-		/// </summary>
-		/// <throws>  NullPointerException if <c>dir</c> or <c>name</c> is null </throws>
-		public CompoundFileWriter(Directory dir, System.String name):this(dir, name, null)
-		{
-		}
-		
-		internal CompoundFileWriter(Directory dir, System.String name, SegmentMerger.CheckAbort checkAbort)
-		{
-			if (dir == null)
-				throw new ArgumentNullException("dir");
-			if (name == null)
-				throw new ArgumentNullException("name");
-			this.checkAbort = checkAbort;
-			directory = dir;
-			fileName = name;
+        private readonly LinkedList<FileEntry> entries;
+        private bool merged = false;
+        private readonly SegmentMerger.CheckAbort checkAbort;
+        
+        /// <summary>Create the compound stream in the specified file. The file name is the
+        /// entire name (no extensions are added).
+        /// </summary>
+        /// <throws>  NullPointerException if <c>dir</c> or <c>name</c> is null </throws>
+        public CompoundFileWriter(Directory dir, System.String name):this(dir, name, null)
+        {
+        }
+        
+        internal CompoundFileWriter(Directory dir, System.String name, SegmentMerger.CheckAbort checkAbort)
+        {
+            if (dir == null)
+                throw new ArgumentNullException("dir");
+            if (name == null)
+                throw new ArgumentNullException("name");
+            this.checkAbort = checkAbort;
+            directory = dir;
+            fileName = name;
             ids = new HashSet<string>();
-			entries = new LinkedList<FileEntry>();
-		}
+            entries = new LinkedList<FileEntry>();
+        }
 
-	    /// <summary>Returns the directory of the compound file. </summary>
-	    public Directory Directory
-	    {
-	        get { return directory; }
-	    }
+        /// <summary>Returns the directory of the compound file. </summary>
+        public Directory Directory
+        {
+            get { return directory; }
+        }
 
-	    /// <summary>Returns the name of the compound file. </summary>
-	    public string Name
-	    {
-	        get { return fileName; }
-	    }
+        /// <summary>Returns the name of the compound file. </summary>
+        public string Name
+        {
+            get { return fileName; }
+        }
 
-	    /// <summary>Add a source stream. <c>file</c> is the string by which the 
-		/// sub-stream will be known in the compound stream.
-		/// 
-		/// </summary>
-		/// <throws>  IllegalStateException if this writer is closed </throws>
-		/// <throws>  NullPointerException if <c>file</c> is null </throws>
-		/// <throws>  IllegalArgumentException if a file with the same name </throws>
-		/// <summary>   has been added already
-		/// </summary>
-		public void  AddFile(String file)
-		{
-			if (merged)
-				throw new InvalidOperationException("Can't add extensions after merge has been called");
-			
-			if (file == null)
-				throw new ArgumentNullException("file");
-			
+        /// <summary>Add a source stream. <c>file</c> is the string by which the 
+        /// sub-stream will be known in the compound stream.
+        /// 
+        /// </summary>
+        /// <throws>  IllegalStateException if this writer is closed </throws>
+        /// <throws>  NullPointerException if <c>file</c> is null </throws>
+        /// <throws>  IllegalArgumentException if a file with the same name </throws>
+        /// <summary>   has been added already
+        /// </summary>
+        public void  AddFile(String file)
+        {
+            if (merged)
+                throw new InvalidOperationException("Can't add extensions after merge has been called");
+            
+            if (file == null)
+                throw new ArgumentNullException("file");
+            
             try
             {
                 ids.Add(file);
             }
             catch (Exception)
             {
-				throw new ArgumentException("File " + file + " already added");
+                throw new ArgumentException("File " + file + " already added");
             }
 
-	    	var entry = new FileEntry {file = file};
-	    	entries.AddLast(entry);
-		}
-		
+            var entry = new FileEntry {file = file};
+            entries.AddLast(entry);
+        }
+        
         [Obsolete("Use Dispose() instead")]
-		public void  Close()
-		{
-		    Dispose();
-		}
+        public void  Close()
+        {
+            Dispose();
+        }
 
         /// <summary>Merge files with the extensions added up to now.
         /// All files with these extensions are combined sequentially into the
@@ -226,50 +226,50 @@ namespace Lucene.Net.Index
             }
         }
 
-		
-		/// <summary>Copy the contents of the file with specified extension into the
-		/// provided output stream. Use the provided buffer for moving data
-		/// to reduce memory allocation.
-		/// </summary>
-		private void  CopyFile(FileEntry source, IndexOutput os, byte[] buffer)
-		{
-			IndexInput isRenamed = null;
-			try
-			{
-				long startPtr = os.FilePointer;
-				
-				isRenamed = directory.OpenInput(source.file);
-				long length = isRenamed.Length();
-				long remainder = length;
-				int chunk = buffer.Length;
-				
-				while (remainder > 0)
-				{
-					var len = (int) Math.Min(chunk, remainder);
-					isRenamed.ReadBytes(buffer, 0, len, false);
-					os.WriteBytes(buffer, len);
-					remainder -= len;
-					if (checkAbort != null)
-					// Roughly every 2 MB we will check if
-					// it's time to abort
-						checkAbort.Work(80);
-				}
-				
-				// Verify that remainder is 0
-				if (remainder != 0)
-					throw new System.IO.IOException("Non-zero remainder length after copying: " + remainder + " (id: " + source.file + ", length: " + length + ", buffer size: " + chunk + ")");
-				
-				// Verify that the output length diff is equal to original file
-				long endPtr = os.FilePointer;
-				long diff = endPtr - startPtr;
-				if (diff != length)
-					throw new System.IO.IOException("Difference in the output file offsets " + diff + " does not match the original file length " + length);
-			}
-			finally
-			{
-				if (isRenamed != null)
-					isRenamed.Close();
-			}
-		}
-	}
+        
+        /// <summary>Copy the contents of the file with specified extension into the
+        /// provided output stream. Use the provided buffer for moving data
+        /// to reduce memory allocation.
+        /// </summary>
+        private void  CopyFile(FileEntry source, IndexOutput os, byte[] buffer)
+        {
+            IndexInput isRenamed = null;
+            try
+            {
+                long startPtr = os.FilePointer;
+                
+                isRenamed = directory.OpenInput(source.file);
+                long length = isRenamed.Length();
+                long remainder = length;
+                int chunk = buffer.Length;
+                
+                while (remainder > 0)
+                {
+                    var len = (int) Math.Min(chunk, remainder);
+                    isRenamed.ReadBytes(buffer, 0, len, false);
+                    os.WriteBytes(buffer, len);
+                    remainder -= len;
+                    if (checkAbort != null)
+                    // Roughly every 2 MB we will check if
+                    // it's time to abort
+                        checkAbort.Work(80);
+                }
+                
+                // Verify that remainder is 0
+                if (remainder != 0)
+                    throw new System.IO.IOException("Non-zero remainder length after copying: " + remainder + " (id: " + source.file + ", length: " + length + ", buffer size: " + chunk + ")");
+                
+                // Verify that the output length diff is equal to original file
+                long endPtr = os.FilePointer;
+                long diff = endPtr - startPtr;
+                if (diff != length)
+                    throw new System.IO.IOException("Difference in the output file offsets " + diff + " does not match the original file length " + length);
+            }
+            finally
+            {
+                if (isRenamed != null)
+                    isRenamed.Close();
+            }
+        }
+    }
 }
\ No newline at end of file
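
The class summary above describes the on-disk layout of a compound file: a VInt file count, then a directory of (long dataOffset, String fileName) entries, then the raw data of each file. A minimal usage sketch (not part of the patch) follows; the file names are hypothetical, and the named sub-files must already exist in the directory before the writer is disposed.

    using Lucene.Net.Index;
    using Lucene.Net.Store;

    class CompoundFileWriterExample
    {
        static void Pack(Directory dir)
        {
            var cfw = new CompoundFileWriter(dir, "_0.cfs");
            cfw.AddFile("_0.fnm");   // register sub-files; nothing is copied yet
            cfw.AddFile("_0.frq");
            cfw.Dispose();           // writes the VInt count, the (offset, name) table, then the raw data
        }
    }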

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/ConcurrentMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/ConcurrentMergeScheduler.cs b/src/core/Index/ConcurrentMergeScheduler.cs
index 8b8a300..79ea91f 100644
--- a/src/core/Index/ConcurrentMergeScheduler.cs
+++ b/src/core/Index/ConcurrentMergeScheduler.cs
@@ -21,118 +21,118 @@ using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary>A <see cref="MergeScheduler" /> that runs each merge using a
-	/// separate thread, up until a maximum number of threads
-	/// (<see cref="MaxThreadCount" />) at which when a merge is
-	/// needed, the thread(s) that are updating the index will
-	/// pause until one or more merges completes.  This is a
-	/// simple way to use concurrency in the indexing process
-	/// without having to create and manage application level
-	/// threads. 
-	/// </summary>
-	
-	public class ConcurrentMergeScheduler:MergeScheduler
-	{
-		
-		private int mergeThreadPriority = - 1;
+    
+    /// <summary>A <see cref="MergeScheduler" /> that runs each merge using a
+    /// separate thread, up until a maximum number of threads
+    /// (<see cref="MaxThreadCount" />) at which when a merge is
+    /// needed, the thread(s) that are updating the index will
+    /// pause until one or more merges completes.  This is a
+    /// simple way to use concurrency in the indexing process
+    /// without having to create and manage application level
+    /// threads. 
+    /// </summary>
+    
+    public class ConcurrentMergeScheduler:MergeScheduler
+    {
+        
+        private int mergeThreadPriority = - 1;
 
         protected internal IList<MergeThread> mergeThreads = new List<MergeThread>();
-		
-		// Max number of threads allowed to be merging at once
-		private int _maxThreadCount = 1;
-		
-		protected internal Directory dir;
-		
-		private bool closed;
-		protected internal IndexWriter writer;
-		protected internal int mergeThreadCount;
-		
-		public ConcurrentMergeScheduler()
-		{
-			if (allInstances != null)
-			{
-				// Only for testing
-				AddMyself();
-			}
-		}
+        
+        // Max number of threads allowed to be merging at once
+        private int _maxThreadCount = 1;
+        
+        protected internal Directory dir;
+        
+        private bool closed;
+        protected internal IndexWriter writer;
+        protected internal int mergeThreadCount;
+        
+        public ConcurrentMergeScheduler()
+        {
+            if (allInstances != null)
+            {
+                // Only for testing
+                AddMyself();
+            }
+        }
 
-	    /// <summary>Gets or sets the max # simultaneous threads that may be
-	    /// running.  If a merge is necessary yet we already have
-	    /// this many threads running, the incoming thread (that
-	    /// is calling add/updateDocument) will block until
-	    /// a merge thread has completed. 
-	    /// </summary>
-	    public virtual int MaxThreadCount
-	    {
-	        set
-	        {
-	            if (value < 1)
-	                throw new System.ArgumentException("count should be at least 1");
-	            _maxThreadCount = value;
-	        }
-	        get { return _maxThreadCount; }
+        /// <summary>Gets or sets the max # simultaneous threads that may be
+        /// running.  If a merge is necessary yet we already have
+        /// this many threads running, the incoming thread (that
+        /// is calling add/updateDocument) will block until
+        /// a merge thread has completed. 
+        /// </summary>
+        public virtual int MaxThreadCount
+        {
+            set
+            {
+                if (value < 1)
+                    throw new System.ArgumentException("count should be at least 1");
+                _maxThreadCount = value;
+            }
+            get { return _maxThreadCount; }
         }
 
-	    /// <summary>Return the priority that merge threads run at.  By
-		/// default the priority is 1 plus the priority of (ie,
-		/// slightly higher priority than) the first thread that
-		/// calls merge. 
-		/// </summary>
+        /// <summary>Return the priority that merge threads run at.  By
+        /// default the priority is 1 plus the priority of (ie,
+        /// slightly higher priority than) the first thread that
+        /// calls merge. 
+        /// </summary>
         [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
         public virtual int GetMergeThreadPriority()
-		{
-			lock (this)
-			{
-				InitMergeThreadPriority();
-				return mergeThreadPriority;
-			}
-		}
-		
-		/// <summary>Set the priority that merge threads run at. </summary>
-		public virtual void  SetMergeThreadPriority(int pri)
-		{
-			lock (this)
-			{
-				if (pri > (int) System.Threading.ThreadPriority.Highest || pri < (int) System.Threading.ThreadPriority.Lowest)
-					throw new System.ArgumentException("priority must be in range " + (int) System.Threading.ThreadPriority.Lowest + " .. " + (int) System.Threading.ThreadPriority.Highest + " inclusive");
-				mergeThreadPriority = pri;
-				
-				int numThreads = MergeThreadCount();
-				for (int i = 0; i < numThreads; i++)
-				{
-					MergeThread merge = mergeThreads[i];
-					merge.SetThreadPriority(pri);
-				}
-			}
-		}
-		
-		private bool Verbose()
-		{
-			return writer != null && writer.Verbose;
-		}
-		
-		private void  Message(System.String message)
-		{
-			if (Verbose())
-				writer.Message("CMS: " + message);
-		}
-		
-		private void  InitMergeThreadPriority()
-		{
-			lock (this)
-			{
-				if (mergeThreadPriority == - 1)
-				{
-					// Default to slightly higher priority than our
-					// calling thread
-					mergeThreadPriority = 1 + (System.Int32) ThreadClass.Current().Priority;
-					if (mergeThreadPriority > (int) System.Threading.ThreadPriority.Highest)
-						mergeThreadPriority = (int) System.Threading.ThreadPriority.Highest;
-				}
-			}
-		}
-		
+        {
+            lock (this)
+            {
+                InitMergeThreadPriority();
+                return mergeThreadPriority;
+            }
+        }
+        
+        /// <summary>Set the priority that merge threads run at. </summary>
+        public virtual void  SetMergeThreadPriority(int pri)
+        {
+            lock (this)
+            {
+                if (pri > (int) System.Threading.ThreadPriority.Highest || pri < (int) System.Threading.ThreadPriority.Lowest)
+                    throw new System.ArgumentException("priority must be in range " + (int) System.Threading.ThreadPriority.Lowest + " .. " + (int) System.Threading.ThreadPriority.Highest + " inclusive");
+                mergeThreadPriority = pri;
+                
+                int numThreads = MergeThreadCount();
+                for (int i = 0; i < numThreads; i++)
+                {
+                    MergeThread merge = mergeThreads[i];
+                    merge.SetThreadPriority(pri);
+                }
+            }
+        }
+        
+        private bool Verbose()
+        {
+            return writer != null && writer.Verbose;
+        }
+        
+        private void  Message(System.String message)
+        {
+            if (Verbose())
+                writer.Message("CMS: " + message);
+        }
+        
+        private void  InitMergeThreadPriority()
+        {
+            lock (this)
+            {
+                if (mergeThreadPriority == - 1)
+                {
+                    // Default to slightly higher priority than our
+                    // calling thread
+                    mergeThreadPriority = 1 + (System.Int32) ThreadClass.Current().Priority;
+                    if (mergeThreadPriority > (int) System.Threading.ThreadPriority.Highest)
+                        mergeThreadPriority = (int) System.Threading.ThreadPriority.Highest;
+                }
+            }
+        }
+        
         protected override void Dispose(bool disposing)
         {
             //if (disposing)
@@ -140,30 +140,30 @@ namespace Lucene.Net.Index
                 closed = true;
             //}
         }
-		
-		public virtual void  Sync()
-		{
-			lock (this)
-			{
-				while (MergeThreadCount() > 0)
-				{
-					if (Verbose())
-						Message("now wait for threads; currently " + mergeThreads.Count + " still running");
-					int count = mergeThreads.Count;
-					if (Verbose())
-					{
-						for (int i = 0; i < count; i++)
-							Message("    " + i + ": " + mergeThreads[i]);
-					}
-					
-					System.Threading.Monitor.Wait(this);
-					
-				}
-			}
-		}
-		
-		private int MergeThreadCount()
-		{
+        
+        public virtual void  Sync()
+        {
+            lock (this)
+            {
+                while (MergeThreadCount() > 0)
+                {
+                    if (Verbose())
+                        Message("now wait for threads; currently " + mergeThreads.Count + " still running");
+                    int count = mergeThreads.Count;
+                    if (Verbose())
+                    {
+                        for (int i = 0; i < count; i++)
+                            Message("    " + i + ": " + mergeThreads[i]);
+                    }
+                    
+                    System.Threading.Monitor.Wait(this);
+                    
+                }
+            }
+        }
+        
+        private int MergeThreadCount()
+        {
             lock (this)
             {
                 int count = 0;
@@ -178,327 +178,327 @@ namespace Lucene.Net.Index
                 return count;
             }
         }
-		
-		public override void  Merge(IndexWriter writer)
-		{
-			// TODO: .NET doesn't support this
-			// assert !Thread.holdsLock(writer);
-			
-			this.writer = writer;
-			
-			InitMergeThreadPriority();
-			
-			dir = writer.Directory;
-			
-			// First, quickly run through the newly proposed merges
-			// and add any orthogonal merges (ie a merge not
-			// involving segments already pending to be merged) to
-			// the queue.  If we are way behind on merging, many of
-			// these newly proposed merges will likely already be
-			// registered.
-			
-			if (Verbose())
-			{
-				Message("now merge");
-				Message("  index: " + writer.SegString());
-			}
-			
-			// Iterate, pulling from the IndexWriter's queue of
-			// pending merges, until it's empty:
-			while (true)
-			{
-				// TODO: we could be careful about which merges to do in
-				// the BG (eg maybe the "biggest" ones) vs FG, which
-				// merges to do first (the easiest ones?), etc.
-				
-				MergePolicy.OneMerge merge = writer.GetNextMerge();
-				if (merge == null)
-				{
-					if (Verbose())
-						Message("  no more merges pending; now return");
-					return ;
-				}
-				
-				// We do this w/ the primary thread to keep
-				// deterministic assignment of segment names
-				writer.MergeInit(merge);
-				
-				bool success = false;
-				try
-				{
-					lock (this)
-					{
-						while (MergeThreadCount() >= _maxThreadCount)
-						{
-							if (Verbose())
-								Message("    too many merge threads running; stalling...");
-							
+        
+        public override void  Merge(IndexWriter writer)
+        {
+            // TODO: .NET doesn't support this
+            // assert !Thread.holdsLock(writer);
+            
+            this.writer = writer;
+            
+            InitMergeThreadPriority();
+            
+            dir = writer.Directory;
+            
+            // First, quickly run through the newly proposed merges
+            // and add any orthogonal merges (ie a merge not
+            // involving segments already pending to be merged) to
+            // the queue.  If we are way behind on merging, many of
+            // these newly proposed merges will likely already be
+            // registered.
+            
+            if (Verbose())
+            {
+                Message("now merge");
+                Message("  index: " + writer.SegString());
+            }
+            
+            // Iterate, pulling from the IndexWriter's queue of
+            // pending merges, until it's empty:
+            while (true)
+            {
+                // TODO: we could be careful about which merges to do in
+                // the BG (eg maybe the "biggest" ones) vs FG, which
+                // merges to do first (the easiest ones?), etc.
+                
+                MergePolicy.OneMerge merge = writer.GetNextMerge();
+                if (merge == null)
+                {
+                    if (Verbose())
+                        Message("  no more merges pending; now return");
+                    return ;
+                }
+                
+                // We do this w/ the primary thread to keep
+                // deterministic assignment of segment names
+                writer.MergeInit(merge);
+                
+                bool success = false;
+                try
+                {
+                    lock (this)
+                    {
+                        while (MergeThreadCount() >= _maxThreadCount)
+                        {
+                            if (Verbose())
+                                Message("    too many merge threads running; stalling...");
+                            
                             System.Threading.Monitor.Wait(this);
-							
-							
-						}
-						
-						if (Verbose())
-							Message("  consider merge " + merge.SegString(dir));
+                            
+                            
+                        }
+                        
+                        if (Verbose())
+                            Message("  consider merge " + merge.SegString(dir));
 
-					    System.Diagnostics.Debug.Assert(MergeThreadCount() < _maxThreadCount);
-												
-						// OK to spawn a new merge thread to handle this
-						// merge:
-						MergeThread merger = GetMergeThread(writer, merge);
-						mergeThreads.Add(merger);
-						if (Verbose())
-							Message("    launch new thread [" + merger.Name + "]");
-						
-						merger.Start();
-						success = true;
-					}
-				}
-				finally
-				{
-					if (!success)
-					{
-						writer.MergeFinish(merge);
-					}
-				}
-			}
-		}
-		
-		/// <summary>Does the actual merge, by calling <see cref="IndexWriter.Merge" /> </summary>
-		protected internal virtual void  DoMerge(MergePolicy.OneMerge merge)
-		{
-			writer.Merge(merge);
-		}
-		
-		/// <summary>Create and return a new MergeThread </summary>
-		protected internal virtual MergeThread GetMergeThread(IndexWriter writer, MergePolicy.OneMerge merge)
-		{
-			lock (this)
-			{
-				var thread = new MergeThread(this, writer, merge);
-				thread.SetThreadPriority(mergeThreadPriority);
-				thread.IsBackground = true;
-				thread.Name = "Lucene Merge Thread #" + mergeThreadCount++;
-				return thread;
-			}
-		}
-		
-		public /*protected internal*/ class MergeThread:ThreadClass
-		{
-			private void  InitBlock(ConcurrentMergeScheduler enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private ConcurrentMergeScheduler enclosingInstance;
-			public ConcurrentMergeScheduler Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			
-			internal IndexWriter writer;
-			internal MergePolicy.OneMerge startMerge;
-			internal MergePolicy.OneMerge runningMerge;
-			
-			public MergeThread(ConcurrentMergeScheduler enclosingInstance, IndexWriter writer, MergePolicy.OneMerge startMerge)
-			{
-				InitBlock(enclosingInstance);
-				this.writer = writer;
-				this.startMerge = startMerge;
-			}
-			
-			public virtual void  SetRunningMerge(MergePolicy.OneMerge merge)
-			{
-				lock (this)
-				{
-					runningMerge = merge;
-				}
-			}
+                        System.Diagnostics.Debug.Assert(MergeThreadCount() < _maxThreadCount);
+                                                
+                        // OK to spawn a new merge thread to handle this
+                        // merge:
+                        MergeThread merger = GetMergeThread(writer, merge);
+                        mergeThreads.Add(merger);
+                        if (Verbose())
+                            Message("    launch new thread [" + merger.Name + "]");
+                        
+                        merger.Start();
+                        success = true;
+                    }
+                }
+                finally
+                {
+                    if (!success)
+                    {
+                        writer.MergeFinish(merge);
+                    }
+                }
+            }
+        }
+        
+        /// <summary>Does the actual merge, by calling <see cref="IndexWriter.Merge" /> </summary>
+        protected internal virtual void  DoMerge(MergePolicy.OneMerge merge)
+        {
+            writer.Merge(merge);
+        }
+        
+        /// <summary>Create and return a new MergeThread </summary>
+        protected internal virtual MergeThread GetMergeThread(IndexWriter writer, MergePolicy.OneMerge merge)
+        {
+            lock (this)
+            {
+                var thread = new MergeThread(this, writer, merge);
+                thread.SetThreadPriority(mergeThreadPriority);
+                thread.IsBackground = true;
+                thread.Name = "Lucene Merge Thread #" + mergeThreadCount++;
+                return thread;
+            }
+        }
+        
+        public /*protected internal*/ class MergeThread:ThreadClass
+        {
+            private void  InitBlock(ConcurrentMergeScheduler enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private ConcurrentMergeScheduler enclosingInstance;
+            public ConcurrentMergeScheduler Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+                
+            }
+            
+            internal IndexWriter writer;
+            internal MergePolicy.OneMerge startMerge;
+            internal MergePolicy.OneMerge runningMerge;
+            
+            public MergeThread(ConcurrentMergeScheduler enclosingInstance, IndexWriter writer, MergePolicy.OneMerge startMerge)
+            {
+                InitBlock(enclosingInstance);
+                this.writer = writer;
+                this.startMerge = startMerge;
+            }
+            
+            public virtual void  SetRunningMerge(MergePolicy.OneMerge merge)
+            {
+                lock (this)
+                {
+                    runningMerge = merge;
+                }
+            }
 
-		    public virtual MergePolicy.OneMerge RunningMerge
-		    {
-		        get
-		        {
-		            lock (this)
-		            {
-		                return runningMerge;
-		            }
-		        }
-		    }
+            public virtual MergePolicy.OneMerge RunningMerge
+            {
+                get
+                {
+                    lock (this)
+                    {
+                        return runningMerge;
+                    }
+                }
+            }
 
-		    public virtual void  SetThreadPriority(int pri)
-			{
-				try
-				{
-					Priority = (System.Threading.ThreadPriority) pri;
-				}
-				catch (System.NullReferenceException)
-				{
-					// Strangely, Sun's JDK 1.5 on Linux sometimes
-					// throws NPE out of here...
-				}
-				catch (System.Security.SecurityException)
-				{
-					// Ignore this because we will still run fine with
-					// normal thread priority
-				}
-			}
-			
-			override public void  Run()
-			{
-				
-				// First time through the while loop we do the merge
-				// that we were started with:
-				MergePolicy.OneMerge merge = this.startMerge;
-				
-				try
-				{
-					
-					if (Enclosing_Instance.Verbose())
-						Enclosing_Instance.Message("  merge thread: start");
-					
-					while (true)
-					{
-						SetRunningMerge(merge);
-						Enclosing_Instance.DoMerge(merge);
-						
-						// Subsequent times through the loop we do any new
-						// merge that writer says is necessary:
-						merge = writer.GetNextMerge();
-						if (merge != null)
-						{
-							writer.MergeInit(merge);
-							if (Enclosing_Instance.Verbose())
-								Enclosing_Instance.Message("  merge thread: do another merge " + merge.SegString(Enclosing_Instance.dir));
-						}
-						else
-							break;
-					}
-					
-					if (Enclosing_Instance.Verbose())
-						Enclosing_Instance.Message("  merge thread: done");
-				}
-				catch (System.Exception exc)
-				{
-					// Ignore the exception if it was due to abort:
-					if (!(exc is MergePolicy.MergeAbortedException))
-					{
-						if (!Enclosing_Instance.suppressExceptions)
-						{
-							// suppressExceptions is normally only set during
-							// testing.
-							Lucene.Net.Index.ConcurrentMergeScheduler.anyExceptions = true;
-							Enclosing_Instance.HandleMergeException(exc);
-						}
-					}
-				}
-				finally
-				{
-					lock (Enclosing_Instance)
-					{
-						System.Threading.Monitor.PulseAll(Enclosing_Instance);
-						Enclosing_Instance.mergeThreads.Remove(this);
+            public virtual void  SetThreadPriority(int pri)
+            {
+                try
+                {
+                    Priority = (System.Threading.ThreadPriority) pri;
+                }
+                catch (System.NullReferenceException)
+                {
+                    // Strangely, Sun's JDK 1.5 on Linux sometimes
+                    // throws NPE out of here...
+                }
+                catch (System.Security.SecurityException)
+                {
+                    // Ignore this because we will still run fine with
+                    // normal thread priority
+                }
+            }
+            
+            override public void  Run()
+            {
+                
+                // First time through the while loop we do the merge
+                // that we were started with:
+                MergePolicy.OneMerge merge = this.startMerge;
+                
+                try
+                {
+                    
+                    if (Enclosing_Instance.Verbose())
+                        Enclosing_Instance.Message("  merge thread: start");
+                    
+                    while (true)
+                    {
+                        SetRunningMerge(merge);
+                        Enclosing_Instance.DoMerge(merge);
+                        
+                        // Subsequent times through the loop we do any new
+                        // merge that writer says is necessary:
+                        merge = writer.GetNextMerge();
+                        if (merge != null)
+                        {
+                            writer.MergeInit(merge);
+                            if (Enclosing_Instance.Verbose())
+                                Enclosing_Instance.Message("  merge thread: do another merge " + merge.SegString(Enclosing_Instance.dir));
+                        }
+                        else
+                            break;
+                    }
+                    
+                    if (Enclosing_Instance.Verbose())
+                        Enclosing_Instance.Message("  merge thread: done");
+                }
+                catch (System.Exception exc)
+                {
+                    // Ignore the exception if it was due to abort:
+                    if (!(exc is MergePolicy.MergeAbortedException))
+                    {
+                        if (!Enclosing_Instance.suppressExceptions)
+                        {
+                            // suppressExceptions is normally only set during
+                            // testing.
+                            Lucene.Net.Index.ConcurrentMergeScheduler.anyExceptions = true;
+                            Enclosing_Instance.HandleMergeException(exc);
+                        }
+                    }
+                }
+                finally
+                {
+                    lock (Enclosing_Instance)
+                    {
+                        System.Threading.Monitor.PulseAll(Enclosing_Instance);
+                        Enclosing_Instance.mergeThreads.Remove(this);
                         bool removed = !Enclosing_Instance.mergeThreads.Contains(this);
-						System.Diagnostics.Debug.Assert(removed);
-					}
-				}
-			}
-			
-			public override System.String ToString()
-			{
-				MergePolicy.OneMerge merge = RunningMerge ?? startMerge;
-				return "merge thread: " + merge.SegString(Enclosing_Instance.dir);
-			}
-		}
-		
-		/// <summary>Called when an exception is hit in a background merge
-		/// thread 
-		/// </summary>
-		protected internal virtual void  HandleMergeException(System.Exception exc)
-		{
-			// When an exception is hit during merge, IndexWriter
-			// removes any partial files and then allows another
-			// merge to run.  If whatever caused the error is not
-			// transient then the exception will keep happening,
-			// so, we sleep here to avoid saturating CPU in such
-			// cases:
-			System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 250));
-			
+                        System.Diagnostics.Debug.Assert(removed);
+                    }
+                }
+            }
+            
+            public override System.String ToString()
+            {
+                MergePolicy.OneMerge merge = RunningMerge ?? startMerge;
+                return "merge thread: " + merge.SegString(Enclosing_Instance.dir);
+            }
+        }
+        
+        /// <summary>Called when an exception is hit in a background merge
+        /// thread 
+        /// </summary>
+        protected internal virtual void  HandleMergeException(System.Exception exc)
+        {
+            // When an exception is hit during merge, IndexWriter
+            // removes any partial files and then allows another
+            // merge to run.  If whatever caused the error is not
+            // transient then the exception will keep happening,
+            // so, we sleep here to avoid saturating CPU in such
+            // cases:
+            System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 250));
+            
             throw new MergePolicy.MergeException(exc, dir);
-		}
-		
-		internal static bool anyExceptions = false;
-		
-		/// <summary>Used for testing </summary>
-		public static bool AnyUnhandledExceptions()
-		{
-			if (allInstances == null)
-			{
-				throw new System.SystemException("setTestMode() was not called; often this is because your test case's setUp method fails to call super.setUp in LuceneTestCase");
-			}
-			lock (allInstances)
-			{
-				int count = allInstances.Count;
-				// Make sure all outstanding threads are done so we see
-				// any exceptions they may produce:
-				for (int i = 0; i < count; i++)
-				    allInstances[i].Sync();
-				bool v = anyExceptions;
-				anyExceptions = false;
-				return v;
-			}
-		}
-		
-		public static void  ClearUnhandledExceptions()
-		{
-			lock (allInstances)
-			{
-				anyExceptions = false;
-			}
-		}
-		
-		/// <summary>Used for testing </summary>
-		private void  AddMyself()
-		{
-			lock (allInstances)
-			{
-				int size = allInstances.Count;
-				int upto = 0;
-				for (int i = 0; i < size; i++)
-				{
-					ConcurrentMergeScheduler other = allInstances[i];
-					if (!(other.closed && 0 == other.MergeThreadCount()))
-					// Keep this one for now: it still has threads or
-					// may spawn new threads
-						allInstances[upto++] = other;
-				}
-			    allInstances.RemoveRange(upto, allInstances.Count - upto);
-				allInstances.Add(this);
-			}
-		}
-		
-		private bool suppressExceptions;
-		
-		/// <summary>Used for testing </summary>
-		public /*internal*/ virtual void  SetSuppressExceptions()
-		{
-			suppressExceptions = true;
-		}
-		
-		/// <summary>Used for testing </summary>
-		public /*internal*/ virtual void  ClearSuppressExceptions()
-		{
-			suppressExceptions = false;
-		}
-		
-		/// <summary>Used for testing </summary>
-		private static List<ConcurrentMergeScheduler> allInstances;
-		public static void  SetTestMode()
-		{
-			allInstances = new List<ConcurrentMergeScheduler>();
-		}
-	}
+        }
+        
+        internal static bool anyExceptions = false;
+        
+        /// <summary>Used for testing </summary>
+        public static bool AnyUnhandledExceptions()
+        {
+            if (allInstances == null)
+            {
+                throw new System.SystemException("setTestMode() was not called; often this is because your test case's setUp method fails to call super.setUp in LuceneTestCase");
+            }
+            lock (allInstances)
+            {
+                int count = allInstances.Count;
+                // Make sure all outstanding threads are done so we see
+                // any exceptions they may produce:
+                for (int i = 0; i < count; i++)
+                    allInstances[i].Sync();
+                bool v = anyExceptions;
+                anyExceptions = false;
+                return v;
+            }
+        }
+        
+        public static void  ClearUnhandledExceptions()
+        {
+            lock (allInstances)
+            {
+                anyExceptions = false;
+            }
+        }
+        
+        /// <summary>Used for testing </summary>
+        private void  AddMyself()
+        {
+            lock (allInstances)
+            {
+                int size = allInstances.Count;
+                int upto = 0;
+                for (int i = 0; i < size; i++)
+                {
+                    ConcurrentMergeScheduler other = allInstances[i];
+                    if (!(other.closed && 0 == other.MergeThreadCount()))
+                    // Keep this one for now: it still has threads or
+                    // may spawn new threads
+                        allInstances[upto++] = other;
+                }
+                allInstances.RemoveRange(upto, allInstances.Count - upto);
+                allInstances.Add(this);
+            }
+        }
+        
+        private bool suppressExceptions;
+        
+        /// <summary>Used for testing </summary>
+        public /*internal*/ virtual void  SetSuppressExceptions()
+        {
+            suppressExceptions = true;
+        }
+        
+        /// <summary>Used for testing </summary>
+        public /*internal*/ virtual void  ClearSuppressExceptions()
+        {
+            suppressExceptions = false;
+        }
+        
+        /// <summary>Used for testing </summary>
+        private static List<ConcurrentMergeScheduler> allInstances;
+        public static void  SetTestMode()
+        {
+            allInstances = new List<ConcurrentMergeScheduler>();
+        }
+    }
 }
\ No newline at end of file
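
The HandleMergeException hook shown in the hunk above is the extension point for background merge failures: it backs off for roughly 250 ms so that a persistent error does not saturate the CPU, then rethrows the failure wrapped in MergePolicy.MergeException. As a hedged illustration only (not part of this commit; the class name and the Seen field are invented for the example, and it assumes the subclass lives outside the Lucene.Net assembly, which is why the override is declared plain protected), a test-side subclass could record the exceptions it observes before delegating to that default behaviour:

    using System;
    using System.Collections.Generic;
    using Lucene.Net.Index;

    // Hypothetical test helper: records merge exceptions, then falls back to the
    // default back-off-and-rethrow behaviour of ConcurrentMergeScheduler.
    class RecordingMergeScheduler : ConcurrentMergeScheduler
    {
        public readonly List<Exception> Seen = new List<Exception>();

        protected override void HandleMergeException(Exception exc)
        {
            lock (Seen)
            {
                Seen.Add(exc);                   // keep the failure for later assertions
            }
            base.HandleMergeException(exc);      // sleeps ~250 ms, then rethrows wrapped
        }
    }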

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/DefaultSkipListReader.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/DefaultSkipListReader.cs b/src/core/Index/DefaultSkipListReader.cs
index a1cddde..470df0d 100644
--- a/src/core/Index/DefaultSkipListReader.cs
+++ b/src/core/Index/DefaultSkipListReader.cs
@@ -20,109 +20,109 @@ using IndexInput = Lucene.Net.Store.IndexInput;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary> Implements the skip list reader for the default posting list format
-	/// that stores positions and payloads.
-	/// 
-	/// </summary>
-	class DefaultSkipListReader:MultiLevelSkipListReader
-	{
-		private bool currentFieldStoresPayloads;
-		private readonly long[] freqPointer;
-		private readonly long[] proxPointer;
-		private readonly int[] payloadLength;
-		
-		private long lastFreqPointer;
-		private long lastProxPointer;
-		private int lastPayloadLength;
-		
-		
-		internal DefaultSkipListReader(IndexInput skipStream, int maxSkipLevels, int skipInterval):base(skipStream, maxSkipLevels, skipInterval)
-		{
-			freqPointer = new long[maxSkipLevels];
-			proxPointer = new long[maxSkipLevels];
-			payloadLength = new int[maxSkipLevels];
-		}
-		
-		internal virtual void  Init(long skipPointer, long freqBasePointer, long proxBasePointer, int df, bool storesPayloads)
-		{
-			base.Init(skipPointer, df);
-			this.currentFieldStoresPayloads = storesPayloads;
-			lastFreqPointer = freqBasePointer;
-			lastProxPointer = proxBasePointer;
+    
+    /// <summary> Implements the skip list reader for the default posting list format
+    /// that stores positions and payloads.
+    /// 
+    /// </summary>
+    class DefaultSkipListReader:MultiLevelSkipListReader
+    {
+        private bool currentFieldStoresPayloads;
+        private readonly long[] freqPointer;
+        private readonly long[] proxPointer;
+        private readonly int[] payloadLength;
+        
+        private long lastFreqPointer;
+        private long lastProxPointer;
+        private int lastPayloadLength;
+        
+        
+        internal DefaultSkipListReader(IndexInput skipStream, int maxSkipLevels, int skipInterval):base(skipStream, maxSkipLevels, skipInterval)
+        {
+            freqPointer = new long[maxSkipLevels];
+            proxPointer = new long[maxSkipLevels];
+            payloadLength = new int[maxSkipLevels];
+        }
+        
+        internal virtual void  Init(long skipPointer, long freqBasePointer, long proxBasePointer, int df, bool storesPayloads)
+        {
+            base.Init(skipPointer, df);
+            this.currentFieldStoresPayloads = storesPayloads;
+            lastFreqPointer = freqBasePointer;
+            lastProxPointer = proxBasePointer;
 
-			for (int i = 0; i < freqPointer.Length; i++) freqPointer[i] = freqBasePointer;
-			for (int i = 0; i < proxPointer.Length; i++) proxPointer[i] = proxBasePointer;
-			for (int i = 0; i < payloadLength.Length; i++) payloadLength[i] = 0;
+            for (int i = 0; i < freqPointer.Length; i++) freqPointer[i] = freqBasePointer;
+            for (int i = 0; i < proxPointer.Length; i++) proxPointer[i] = proxBasePointer;
+            for (int i = 0; i < payloadLength.Length; i++) payloadLength[i] = 0;
+        }
+        
+        /// <summary>Returns the freq pointer of the doc to which the last call of 
+        /// <see cref="MultiLevelSkipListReader.SkipTo(int)" /> has skipped.  
+        /// </summary>
+        internal virtual long GetFreqPointer()
+        {
+            return lastFreqPointer;
+        }
+        
+        /// <summary>Returns the prox pointer of the doc to which the last call of 
+        /// <see cref="MultiLevelSkipListReader.SkipTo(int)" /> has skipped.  
+        /// </summary>
+        internal virtual long GetProxPointer()
+        {
+            return lastProxPointer;
+        }
+        
+        /// <summary>Returns the payload length of the payload stored just before 
+        /// the doc to which the last call of <see cref="MultiLevelSkipListReader.SkipTo(int)" /> 
+        /// has skipped.  
+        /// </summary>
+        internal virtual int GetPayloadLength()
+        {
+            return lastPayloadLength;
+        }
+        
+        protected internal override void  SeekChild(int level)
+        {
+            base.SeekChild(level);
+            freqPointer[level] = lastFreqPointer;
+            proxPointer[level] = lastProxPointer;
+            payloadLength[level] = lastPayloadLength;
+        }
+        
+        protected internal override void  SetLastSkipData(int level)
+        {
+            base.SetLastSkipData(level);
+            lastFreqPointer = freqPointer[level];
+            lastProxPointer = proxPointer[level];
+            lastPayloadLength = payloadLength[level];
+        }
+        
+        
+        protected internal override int ReadSkipData(int level, IndexInput skipStream)
+        {
+            int delta;
+            if (currentFieldStoresPayloads)
+            {
+                // the current field stores payloads.
+                // if the doc delta is odd then we have
+                // to read the current payload length
+                // because it differs from the length of the
+                // previous payload
+                delta = skipStream.ReadVInt();
+                if ((delta & 1) != 0)
+                {
+                    payloadLength[level] = skipStream.ReadVInt();
+                }
+                delta = Number.URShift(delta, 1);
+            }
+            else
+            {
+                delta = skipStream.ReadVInt();
+            }
+            freqPointer[level] += skipStream.ReadVInt();
+            proxPointer[level] += skipStream.ReadVInt();
+            
+            return delta;
         }
-		
-		/// <summary>Returns the freq pointer of the doc to which the last call of 
-		/// <see cref="MultiLevelSkipListReader.SkipTo(int)" /> has skipped.  
-		/// </summary>
-		internal virtual long GetFreqPointer()
-		{
-			return lastFreqPointer;
-		}
-		
-		/// <summary>Returns the prox pointer of the doc to which the last call of 
-		/// <see cref="MultiLevelSkipListReader.SkipTo(int)" /> has skipped.  
-		/// </summary>
-		internal virtual long GetProxPointer()
-		{
-			return lastProxPointer;
-		}
-		
-		/// <summary>Returns the payload length of the payload stored just before 
-		/// the doc to which the last call of <see cref="MultiLevelSkipListReader.SkipTo(int)" /> 
-		/// has skipped.  
-		/// </summary>
-		internal virtual int GetPayloadLength()
-		{
-			return lastPayloadLength;
-		}
-		
-		protected internal override void  SeekChild(int level)
-		{
-			base.SeekChild(level);
-			freqPointer[level] = lastFreqPointer;
-			proxPointer[level] = lastProxPointer;
-			payloadLength[level] = lastPayloadLength;
-		}
-		
-		protected internal override void  SetLastSkipData(int level)
-		{
-			base.SetLastSkipData(level);
-			lastFreqPointer = freqPointer[level];
-			lastProxPointer = proxPointer[level];
-			lastPayloadLength = payloadLength[level];
-		}
-		
-		
-		protected internal override int ReadSkipData(int level, IndexInput skipStream)
-		{
-			int delta;
-			if (currentFieldStoresPayloads)
-			{
-				// the current field stores payloads.
-				// if the doc delta is odd then we have
-				// to read the current payload length
-				// because it differs from the length of the
-				// previous payload
-				delta = skipStream.ReadVInt();
-				if ((delta & 1) != 0)
-				{
-					payloadLength[level] = skipStream.ReadVInt();
-				}
-				delta = Number.URShift(delta, 1);
-			}
-			else
-			{
-				delta = skipStream.ReadVInt();
-			}
-			freqPointer[level] += skipStream.ReadVInt();
-			proxPointer[level] += skipStream.ReadVInt();
-			
-			return delta;
-		}
-	}
+    }
 }
\ No newline at end of file
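
ReadSkipData above relies on a compact trick: when the current field stores payloads, the writer doubles the doc delta and uses the low bit to flag that a new payload length follows; the freq and prox file pointers are then read as plain VInt deltas. Below is a standalone sketch of the same decoding logic, with a Func<int> delegate standing in for IndexInput.ReadVInt (an assumption of this example, not the committed API):

    using System;

    // Illustrative decoder for one skip datum in the payload-aware format
    // described by DefaultSkipListReader.ReadSkipData.
    static class SkipDatumDecoding
    {
        public static int Decode(Func<int> readVInt, bool storesPayloads,
                                 out int payloadLength, out int freqDelta, out int proxDelta)
        {
            int delta = readVInt();
            payloadLength = -1;                  // -1: unchanged since the previous skip point
            if (storesPayloads)
            {
                if ((delta & 1) != 0)
                {
                    payloadLength = readVInt();  // a new length was written at this point
                }
                delta = (int)((uint)delta >> 1); // unsigned shift, like Number.URShift
            }
            freqDelta = readVInt();              // delta to add to the freq pointer
            proxDelta = readVInt();              // delta to add to the prox pointer
            return delta;                        // document-number delta
        }
    }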

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/62f018ab/src/core/Index/DefaultSkipListWriter.cs
----------------------------------------------------------------------
diff --git a/src/core/Index/DefaultSkipListWriter.cs b/src/core/Index/DefaultSkipListWriter.cs
index 77412af..1c2de1b 100644
--- a/src/core/Index/DefaultSkipListWriter.cs
+++ b/src/core/Index/DefaultSkipListWriter.cs
@@ -21,123 +21,123 @@ using IndexOutput = Lucene.Net.Store.IndexOutput;
 
 namespace Lucene.Net.Index
 {
-	
-	
-	/// <summary> Implements the skip list writer for the default posting list format
-	/// that stores positions and payloads.
-	/// 
-	/// </summary>
-	class DefaultSkipListWriter:MultiLevelSkipListWriter
-	{
-		private int[] lastSkipDoc;
-		private int[] lastSkipPayloadLength;
-		private long[] lastSkipFreqPointer;
-		private long[] lastSkipProxPointer;
-		
-		private IndexOutput freqOutput;
-		private IndexOutput proxOutput;
-		
-		private int curDoc;
-		private bool curStorePayloads;
-		private int curPayloadLength;
-		private long curFreqPointer;
-		private long curProxPointer;
-		
-		internal DefaultSkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput):base(skipInterval, numberOfSkipLevels, docCount)
-		{
-			this.freqOutput = freqOutput;
-			this.proxOutput = proxOutput;
-			
-			lastSkipDoc = new int[numberOfSkipLevels];
-			lastSkipPayloadLength = new int[numberOfSkipLevels];
-			lastSkipFreqPointer = new long[numberOfSkipLevels];
-			lastSkipProxPointer = new long[numberOfSkipLevels];
-		}
-		
-		internal virtual void  SetFreqOutput(IndexOutput freqOutput)
-		{
-			this.freqOutput = freqOutput;
-		}
-		
-		internal virtual void  SetProxOutput(IndexOutput proxOutput)
-		{
-			this.proxOutput = proxOutput;
-		}
-		
-		/// <summary> Sets the values for the current skip data. </summary>
-		internal virtual void  SetSkipData(int doc, bool storePayloads, int payloadLength)
-		{
-			this.curDoc = doc;
-			this.curStorePayloads = storePayloads;
-			this.curPayloadLength = payloadLength;
-			this.curFreqPointer = freqOutput.FilePointer;
-			if (proxOutput != null)
-				this.curProxPointer = proxOutput.FilePointer;
-		}
-		
-		protected internal override void  ResetSkip()
-		{
-			base.ResetSkip();
-			for (int i = 0; i < lastSkipDoc.Length; i++) lastSkipDoc[i] = 0;
-			for (int i = 0; i < lastSkipPayloadLength.Length; i++) lastSkipPayloadLength[i] = -1; // we don't have to write the first length in the skip list
-			for (int i = 0; i < lastSkipFreqPointer.Length; i++) lastSkipFreqPointer[i] = freqOutput.FilePointer;
-			if (proxOutput != null)
-				for (int i = 0; i < lastSkipProxPointer.Length; i++) lastSkipProxPointer[i] = proxOutput.FilePointer;
-		}
-		
-		protected internal override void  WriteSkipData(int level, IndexOutput skipBuffer)
-		{
-			// To efficiently store payloads in the posting lists we do not store the length of
-			// every payload. Instead we omit the length for a payload if the previous payload had
-			// the same length.
-			// However, in order to support skipping the payload length at every skip point must be known.
-			// So we use the same length encoding that we use for the posting lists for the skip data as well:
-			// Case 1: current field does not store payloads
-			//           SkipDatum                 --> DocSkip, FreqSkip, ProxSkip
-			//           DocSkip,FreqSkip,ProxSkip --> VInt
-			//           DocSkip records the document number before every SkipInterval th  document in TermFreqs. 
-			//           Document numbers are represented as differences from the previous value in the sequence.
-			// Case 2: current field stores payloads
-			//           SkipDatum                 --> DocSkip, PayloadLength?, FreqSkip,ProxSkip
-			//           DocSkip,FreqSkip,ProxSkip --> VInt
-			//           PayloadLength             --> VInt    
-			//         In this case DocSkip/2 is the difference between
-			//         the current and the previous value. If DocSkip
-			//         is odd, then a PayloadLength encoded as VInt follows,
-			//         if DocSkip is even, then it is assumed that the
-			//         current payload length equals the length at the previous
-			//         skip point
-			if (curStorePayloads)
-			{
-				int delta = curDoc - lastSkipDoc[level];
-				if (curPayloadLength == lastSkipPayloadLength[level])
-				{
-					// the current payload length equals the length at the previous skip point,
-					// so we don't store the length again
-					skipBuffer.WriteVInt(delta * 2);
-				}
-				else
-				{
-					// the payload length is different from the previous one. We shift the DocSkip, 
-					// set the lowest bit and store the current payload length as VInt.
-					skipBuffer.WriteVInt(delta * 2 + 1);
-					skipBuffer.WriteVInt(curPayloadLength);
-					lastSkipPayloadLength[level] = curPayloadLength;
-				}
-			}
-			else
-			{
-				// current field does not store payloads
-				skipBuffer.WriteVInt(curDoc - lastSkipDoc[level]);
-			}
-			skipBuffer.WriteVInt((int) (curFreqPointer - lastSkipFreqPointer[level]));
-			skipBuffer.WriteVInt((int) (curProxPointer - lastSkipProxPointer[level]));
-			
-			lastSkipDoc[level] = curDoc;
-			//System.out.println("write doc at level " + level + ": " + curDoc);
-			
-			lastSkipFreqPointer[level] = curFreqPointer;
-			lastSkipProxPointer[level] = curProxPointer;
-		}
-	}
+    
+    
+    /// <summary> Implements the skip list writer for the default posting list format
+    /// that stores positions and payloads.
+    /// 
+    /// </summary>
+    class DefaultSkipListWriter:MultiLevelSkipListWriter
+    {
+        private int[] lastSkipDoc;
+        private int[] lastSkipPayloadLength;
+        private long[] lastSkipFreqPointer;
+        private long[] lastSkipProxPointer;
+        
+        private IndexOutput freqOutput;
+        private IndexOutput proxOutput;
+        
+        private int curDoc;
+        private bool curStorePayloads;
+        private int curPayloadLength;
+        private long curFreqPointer;
+        private long curProxPointer;
+        
+        internal DefaultSkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput):base(skipInterval, numberOfSkipLevels, docCount)
+        {
+            this.freqOutput = freqOutput;
+            this.proxOutput = proxOutput;
+            
+            lastSkipDoc = new int[numberOfSkipLevels];
+            lastSkipPayloadLength = new int[numberOfSkipLevels];
+            lastSkipFreqPointer = new long[numberOfSkipLevels];
+            lastSkipProxPointer = new long[numberOfSkipLevels];
+        }
+        
+        internal virtual void  SetFreqOutput(IndexOutput freqOutput)
+        {
+            this.freqOutput = freqOutput;
+        }
+        
+        internal virtual void  SetProxOutput(IndexOutput proxOutput)
+        {
+            this.proxOutput = proxOutput;
+        }
+        
+        /// <summary> Sets the values for the current skip data. </summary>
+        internal virtual void  SetSkipData(int doc, bool storePayloads, int payloadLength)
+        {
+            this.curDoc = doc;
+            this.curStorePayloads = storePayloads;
+            this.curPayloadLength = payloadLength;
+            this.curFreqPointer = freqOutput.FilePointer;
+            if (proxOutput != null)
+                this.curProxPointer = proxOutput.FilePointer;
+        }
+        
+        protected internal override void  ResetSkip()
+        {
+            base.ResetSkip();
+            for (int i = 0; i < lastSkipDoc.Length; i++) lastSkipDoc[i] = 0;
+            for (int i = 0; i < lastSkipPayloadLength.Length; i++) lastSkipPayloadLength[i] = -1; // we don't have to write the first length in the skip list
+            for (int i = 0; i < lastSkipFreqPointer.Length; i++) lastSkipFreqPointer[i] = freqOutput.FilePointer;
+            if (proxOutput != null)
+                for (int i = 0; i < lastSkipProxPointer.Length; i++) lastSkipProxPointer[i] = proxOutput.FilePointer;
+        }
+        
+        protected internal override void  WriteSkipData(int level, IndexOutput skipBuffer)
+        {
+            // To efficiently store payloads in the posting lists we do not store the length of
+            // every payload. Instead we omit the length for a payload if the previous payload had
+            // the same length.
+            // However, in order to support skipping the payload length at every skip point must be known.
+            // So we use the same length encoding that we use for the posting lists for the skip data as well:
+            // Case 1: current field does not store payloads
+            //           SkipDatum                 --> DocSkip, FreqSkip, ProxSkip
+            //           DocSkip,FreqSkip,ProxSkip --> VInt
+            //           DocSkip records the document number before every SkipInterval th  document in TermFreqs. 
+            //           Document numbers are represented as differences from the previous value in the sequence.
+            // Case 2: current field stores payloads
+            //           SkipDatum                 --> DocSkip, PayloadLength?, FreqSkip,ProxSkip
+            //           DocSkip,FreqSkip,ProxSkip --> VInt
+            //           PayloadLength             --> VInt    
+            //         In this case DocSkip/2 is the difference between
+            //         the current and the previous value. If DocSkip
+            //         is odd, then a PayloadLength encoded as VInt follows,
+            //         if DocSkip is even, then it is assumed that the
+            //         current payload length equals the length at the previous
+            //         skip point
+            if (curStorePayloads)
+            {
+                int delta = curDoc - lastSkipDoc[level];
+                if (curPayloadLength == lastSkipPayloadLength[level])
+                {
+                    // the current payload length equals the length at the previous skip point,
+                    // so we don't store the length again
+                    skipBuffer.WriteVInt(delta * 2);
+                }
+                else
+                {
+                    // the payload length is different from the previous one. We shift the DocSkip, 
+                    // set the lowest bit and store the current payload length as VInt.
+                    skipBuffer.WriteVInt(delta * 2 + 1);
+                    skipBuffer.WriteVInt(curPayloadLength);
+                    lastSkipPayloadLength[level] = curPayloadLength;
+                }
+            }
+            else
+            {
+                // current field does not store payloads
+                skipBuffer.WriteVInt(curDoc - lastSkipDoc[level]);
+            }
+            skipBuffer.WriteVInt((int) (curFreqPointer - lastSkipFreqPointer[level]));
+            skipBuffer.WriteVInt((int) (curProxPointer - lastSkipProxPointer[level]));
+            
+            lastSkipDoc[level] = curDoc;
+            //System.out.println("write doc at level " + level + ": " + curDoc);
+            
+            lastSkipFreqPointer[level] = curFreqPointer;
+            lastSkipProxPointer[level] = curProxPointer;
+        }
+    }
 }
\ No newline at end of file
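
WriteSkipData's comment block spells out both encoding cases: without payloads the doc delta is written as-is; with payloads it is doubled and the low bit marks a changed payload length, followed in either case by the freq and prox pointer deltas. As a mirror of the decoder sketch above, here is a hedged writer-side version; the Action<int> delegate stands in for IndexOutput.WriteVInt, and the method itself is illustrative rather than the committed code:

    using System;

    // Illustrative encoder for one skip datum in the format produced by
    // DefaultSkipListWriter.WriteSkipData.
    static class SkipDatumEncoding
    {
        public static void Encode(Action<int> writeVInt, bool storesPayloads,
                                  int docDelta, int payloadLength, int lastPayloadLength,
                                  int freqDelta, int proxDelta)
        {
            if (storesPayloads)
            {
                if (payloadLength == lastPayloadLength)
                {
                    writeVInt(docDelta * 2);     // even: payload length unchanged
                }
                else
                {
                    writeVInt(docDelta * 2 + 1); // odd: a new payload length follows
                    writeVInt(payloadLength);
                }
            }
            else
            {
                writeVInt(docDelta);             // no payloads: plain doc delta
            }
            writeVInt(freqDelta);                // freq pointer delta
            writeVInt(proxDelta);                // prox pointer delta
        }
    }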