Posted to commits@lucenenet.apache.org by cc...@apache.org on 2011/11/09 22:03:52 UTC

[Lucene.Net] svn commit: r1199962 [4/14] - in /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/ src/core/Analysis/ src/core/Analysis/Standard/ src/core/Document/ src/core/Index/ src/core/QueryParser/ src/core/Search/ src/core/Search/Function/ src/cor...

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumer.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumer.cs Wed Nov  9 21:03:47 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -28,7 +29,7 @@ namespace Lucene.Net.Index
 		/// <summary>Called when DocumentsWriter decides to create a new
 		/// segment 
 		/// </summary>
-		public abstract void  Flush(System.Collections.IDictionary threadsAndFields, SegmentWriteState state);
+        public abstract void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state);
 		
 		/// <summary>Called when DocumentsWriter decides to close the doc
 		/// stores 

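Note: the hunk above retypes Flush from the untyped System.Collections.IDictionary to a generic map from each per-thread consumer to the set of per-field consumers it touched, removing the runtime casts. A minimal standalone sketch of the same migration, using placeholder string keys and values rather than the actual DocFieldConsumerPerThread/DocFieldConsumerPerField types:

    using System;
    using System.Collections;
    using System.Collections.Generic;

    class GenericFlushMapDemo
    {
        static void Main()
        {
            // Old style: the untyped IDictionary needs a cast at every access.
            IDictionary legacy = new Hashtable();
            legacy["thread-1"] = new ArrayList { "fieldA", "fieldB" };
            foreach (DictionaryEntry entry in legacy)
            {
                ArrayList fields = (ArrayList) entry.Value;        // cast required
                Console.WriteLine(entry.Key + ": " + fields.Count);
            }

            // New style: key and value types are checked by the compiler.
            var typed = new Dictionary<string, ICollection<string>>();
            typed["thread-1"] = new HashSet<string> { "fieldA", "fieldB" };
            foreach (KeyValuePair<string, ICollection<string>> entry in typed)
            {
                Console.WriteLine(entry.Key + ": " + entry.Value.Count); // no casts
            }
        }
    }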
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs Wed Nov  9 21:03:47 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
 
 namespace Lucene.Net.Index
@@ -26,7 +26,7 @@ namespace Lucene.Net.Index
 	/// DocFieldConsumer instances as a single consumer. 
 	/// </summary>
 	
-	sealed class DocFieldConsumers:DocFieldConsumer
+	sealed class DocFieldConsumers : DocFieldConsumer
 	{
 		private void  InitBlock()
 		{
@@ -48,31 +48,26 @@ namespace Lucene.Net.Index
 			one.SetFieldInfos(fieldInfos);
 			two.SetFieldInfos(fieldInfos);
 		}
-		
-		public override void  Flush(System.Collections.IDictionary threadsAndFields, SegmentWriteState state)
+
+        public override void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state)
 		{
+
+            var oneThreadsAndFields = new SupportClass.HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
+			var twoThreadsAndFields = new SupportClass.HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
 			
-			System.Collections.IDictionary oneThreadsAndFields = new System.Collections.Hashtable();
-			System.Collections.IDictionary twoThreadsAndFields = new System.Collections.Hashtable();
-			
-			System.Collections.IEnumerator it = new System.Collections.Hashtable(threadsAndFields).GetEnumerator();
-			while (it.MoveNext())
+			foreach(var entry in threadsAndFields)
 			{
-				
-				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) it.Current;
-				
 				DocFieldConsumersPerThread perThread = (DocFieldConsumersPerThread) entry.Key;
-				
-				System.Collections.ICollection fields = (System.Collections.ICollection) entry.Value;
-				
-				System.Collections.IEnumerator fieldsIt = fields.GetEnumerator();
-				System.Collections.Hashtable oneFields = new System.Collections.Hashtable();
-				System.Collections.Hashtable twoFields = new System.Collections.Hashtable();
+                ICollection<DocFieldConsumerPerField> fields = entry.Value;
+
+                IEnumerator<DocFieldConsumerPerField> fieldsIt = fields.GetEnumerator();
+                ICollection<DocFieldConsumerPerField> oneFields = new HashSet<DocFieldConsumerPerField>();
+                ICollection<DocFieldConsumerPerField> twoFields = new HashSet<DocFieldConsumerPerField>();
 				while (fieldsIt.MoveNext())
 				{
 					DocFieldConsumersPerField perField = (DocFieldConsumersPerField) fieldsIt.Current;
-					SupportClass.CollectionsHelper.AddIfNotContains(oneFields, perField.one);
-					SupportClass.CollectionsHelper.AddIfNotContains(twoFields, perField.two);
+					oneFields.Add(perField.one);
+					twoFields.Add(perField.two);
 				}
 				
 				oneThreadsAndFields[perThread.one] = oneFields;
@@ -83,8 +78,8 @@ namespace Lucene.Net.Index
 			one.Flush(oneThreadsAndFields, state);
 			two.Flush(twoThreadsAndFields, state);
 		}
-		
-		public override void  CloseDocStore(SegmentWriteState state)
+
+	    public override void  CloseDocStore(SegmentWriteState state)
 		{
 			try
 			{

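Note: with the per-field collections now typed as HashSet<DocFieldConsumerPerField>, the SupportClass.CollectionsHelper.AddIfNotContains helper reduces to a plain Add, because a set already ignores duplicate insertions; the generic foreach also replaces the old enumerator over a defensive Hashtable copy. A standalone sketch of the set behavior (string elements as placeholders):

    using System;
    using System.Collections.Generic;

    class HashSetAddDemo
    {
        static void Main()
        {
            var fields = new HashSet<string>();
            Console.WriteLine(fields.Add("title"));  // True  (inserted)
            Console.WriteLine(fields.Add("title"));  // False (already present, not added twice)
            Console.WriteLine(fields.Count);         // 1
        }
    }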
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs Wed Nov  9 21:03:47 2011
@@ -16,6 +16,8 @@
  */
 
 using System;
+using System.Collections;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
@@ -52,11 +54,10 @@ namespace Lucene.Net.Index
 		public override void  Flush(System.Collections.ICollection threads, SegmentWriteState state)
 		{
 			
-			System.Collections.IDictionary childThreadsAndFields = new System.Collections.Hashtable();
-			System.Collections.IEnumerator it = threads.GetEnumerator();
-			while (it.MoveNext())
+			var childThreadsAndFields = new SupportClass.HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
+			foreach(DocConsumerPerThread thread in threads)
 			{
-				DocFieldProcessorPerThread perThread = (DocFieldProcessorPerThread) ((System.Collections.DictionaryEntry) it.Current).Key;
+                DocFieldProcessorPerThread perThread = (DocFieldProcessorPerThread)thread;
 				childThreadsAndFields[perThread.consumer] = perThread.Fields();
 				perThread.TrimFields(state);
 			}
@@ -69,7 +70,7 @@ namespace Lucene.Net.Index
 			// FieldInfo.storePayload.
 			System.String fileName = state.SegmentFileName(IndexFileNames.FIELD_INFOS_EXTENSION);
 			fieldInfos.Write(state.directory, fileName);
-			SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, fileName);
+            state.flushedFiles.Add(fileName);
 		}
 		
 		public override void  Abort()

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs Wed Nov  9 21:03:47 2011
@@ -84,15 +84,16 @@ namespace Lucene.Net.Index
 			consumer.Abort();
 		}
 		
-		public System.Collections.ICollection Fields()
+		public System.Collections.Generic.ICollection<DocFieldConsumerPerField> Fields()
 		{
-			System.Collections.Hashtable fields = new System.Collections.Hashtable();
+		    System.Collections.Generic.ICollection<DocFieldConsumerPerField> fields =
+		        new System.Collections.Generic.HashSet<DocFieldConsumerPerField>();
 			for (int i = 0; i < fieldHash.Length; i++)
 			{
 				DocFieldProcessorPerField field = fieldHash[i];
 				while (field != null)
 				{
-					fields[field.consumer] = field.consumer;
+					fields.Add(field.consumer);
 					field = field.next;
 				}
 			}
@@ -146,7 +147,7 @@ namespace Lucene.Net.Index
 		
 		private void  Rehash()
 		{
-			int newHashSize = (int) (fieldHash.Length * 2);
+			int newHashSize = (fieldHash.Length * 2);
 			System.Diagnostics.Debug.Assert(newHashSize > fieldHash.Length);
 			
 			DocFieldProcessorPerField[] newHashArray = new DocFieldProcessorPerField[newHashSize];
@@ -184,7 +185,7 @@ namespace Lucene.Net.Index
 			
 			int thisFieldGen = fieldGen++;
 			
-			System.Collections.IList docFields = doc.GetFields();
+			System.Collections.Generic.IList<Fieldable> docFields = doc.GetFields();
 			int numDocFields = docFields.Count;
 			
 			// Absorb any new fields first seen in this document.
@@ -194,7 +195,7 @@ namespace Lucene.Net.Index
 			
 			for (int i = 0; i < numDocFields; i++)
 			{
-				Fieldable field = (Fieldable) docFields[i];
+				Fieldable field = docFields[i];
 				System.String fieldName = field.Name();
 				
 				// Make sure we have a PerField allocated
@@ -202,29 +203,35 @@ namespace Lucene.Net.Index
 				DocFieldProcessorPerField fp = fieldHash[hashPos];
 				while (fp != null && !fp.fieldInfo.name.Equals(fieldName))
 					fp = fp.next;
-				
-				if (fp == null)
-				{
-					
-					// TODO FI: we need to genericize the "flags" that a
-					// field holds, and, how these flags are merged; it
-					// needs to be more "pluggable" such that if I want
-					// to have a new "thing" my Fields can do, I can
-					// easily add it
-					FieldInfo fi = fieldInfos.Add(fieldName, field.IsIndexed(), field.IsTermVectorStored(), field.IsStorePositionWithTermVector(), field.IsStoreOffsetWithTermVector(), field.GetOmitNorms(), false, field.GetOmitTf());
-					
-					fp = new DocFieldProcessorPerField(this, fi);
-					fp.next = fieldHash[hashPos];
-					fieldHash[hashPos] = fp;
-					totalFieldCount++;
-					
-					if (totalFieldCount >= fieldHash.Length / 2)
-						Rehash();
-				}
-				else
-					fp.fieldInfo.Update(field.IsIndexed(), field.IsTermVectorStored(), field.IsStorePositionWithTermVector(), field.IsStoreOffsetWithTermVector(), field.GetOmitNorms(), false, field.GetOmitTf());
-				
-				if (thisFieldGen != fp.lastGen)
+
+                if (fp == null)
+                {
+
+                    // TODO FI: we need to genericize the "flags" that a
+                    // field holds, and, how these flags are merged; it
+                    // needs to be more "pluggable" such that if I want
+                    // to have a new "thing" my Fields can do, I can
+                    // easily add it
+                    FieldInfo fi = fieldInfos.Add(fieldName, field.IsIndexed(), field.IsTermVectorStored(),
+                                                  field.IsStorePositionWithTermVector(), field.IsStoreOffsetWithTermVector(),
+                                                  field.GetOmitNorms(), false, field.GetOmitTermFreqAndPositions());
+
+                    fp = new DocFieldProcessorPerField(this, fi);
+                    fp.next = fieldHash[hashPos];
+                    fieldHash[hashPos] = fp;
+                    totalFieldCount++;
+
+                    if (totalFieldCount >= fieldHash.Length / 2)
+                        Rehash();
+                }
+                else
+                {
+                    fp.fieldInfo.Update(field.IsIndexed(), field.IsTermVectorStored(),
+                                        field.IsStorePositionWithTermVector(), field.IsStoreOffsetWithTermVector(),
+                                        field.GetOmitNorms(), false, field.GetOmitTermFreqAndPositions());
+                }
+
+			    if (thisFieldGen != fp.lastGen)
 				{
 					
 					// First time we're seeing this field for this doc

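Note: Fields() now collects consumers into a HashSet<DocFieldConsumerPerField> instead of using a Hashtable as a makeshift set, and Rehash() keeps the grow-at-half-capacity policy (only the redundant (int) cast on fieldHash.Length * 2 is dropped). A standalone sketch of such a chained hash table that doubles once half full; string keys and modulo bucket selection are simplifications for illustration, not the Lucene.Net internals:

    using System;

    class ChainedHashDemo
    {
        class Node
        {
            public string Key;
            public Node Next;
        }

        static Node[] buckets = new Node[4];
        static int count;

        static void Add(string key)
        {
            int pos = (key.GetHashCode() & 0x7FFFFFFF) % buckets.Length;
            for (Node n = buckets[pos]; n != null; n = n.Next)
                if (n.Key == key) return;                     // field already known
            buckets[pos] = new Node { Key = key, Next = buckets[pos] };
            // Grow once the table is half full, mirroring
            // "if (totalFieldCount >= fieldHash.Length / 2) Rehash();".
            if (++count >= buckets.Length / 2) Rehash();
        }

        static void Rehash()
        {
            Node[] old = buckets;
            buckets = new Node[old.Length * 2];
            foreach (Node head in old)
            {
                Node n = head;
                while (n != null)
                {
                    Node next = n.Next;                       // relink into the new table
                    int pos = (n.Key.GetHashCode() & 0x7FFFFFFF) % buckets.Length;
                    n.Next = buckets[pos];
                    buckets[pos] = n;
                    n = next;
                }
            }
        }

        static void Main()
        {
            foreach (string f in new[] { "title", "body", "date", "author" })
                Add(f);
            Console.WriteLine(buckets.Length);  // doubled each time the table hit half capacity
        }
    }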
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs Wed Nov  9 21:03:47 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using AttributeSource = Lucene.Net.Util.AttributeSource;
 
 namespace Lucene.Net.Index
@@ -27,7 +27,7 @@ namespace Lucene.Net.Index
 	/// InvertedTermsConsumer to process those terms. 
 	/// </summary>
 	
-	sealed class DocInverter:DocFieldConsumer
+	sealed class DocInverter : DocFieldConsumer
 	{
 		
 		internal InvertedDocConsumer consumer;
@@ -45,31 +45,24 @@ namespace Lucene.Net.Index
 			consumer.SetFieldInfos(fieldInfos);
 			endConsumer.SetFieldInfos(fieldInfos);
 		}
-		
-		public override void  Flush(System.Collections.IDictionary threadsAndFields, SegmentWriteState state)
+
+        public override void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state)
 		{
-			
-			System.Collections.IDictionary childThreadsAndFields = new System.Collections.Hashtable();
-			System.Collections.IDictionary endChildThreadsAndFields = new System.Collections.Hashtable();
-			
-			System.Collections.IEnumerator it = new System.Collections.Hashtable(threadsAndFields).GetEnumerator();
-			while (it.MoveNext())
+
+            var childThreadsAndFields = new SupportClass.HashMap<InvertedDocConsumerPerThread, ICollection<InvertedDocConsumerPerField>>();
+            var endChildThreadsAndFields = new SupportClass.HashMap<InvertedDocEndConsumerPerThread, ICollection<InvertedDocEndConsumerPerField>>();
+
+            foreach (var entry in threadsAndFields)
 			{
-				
-				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) it.Current;
-				
 				DocInverterPerThread perThread = (DocInverterPerThread) entry.Key;
-				
-				System.Collections.ICollection fields = (System.Collections.ICollection) entry.Value;
-				
-				System.Collections.IEnumerator fieldsIt = fields.GetEnumerator();
-				System.Collections.Hashtable childFields = new System.Collections.Hashtable();
-				System.Collections.Hashtable endChildFields = new System.Collections.Hashtable();
-				while (fieldsIt.MoveNext())
+
+				ICollection<InvertedDocConsumerPerField> childFields = new HashSet<InvertedDocConsumerPerField>();
+				ICollection<InvertedDocEndConsumerPerField> endChildFields = new HashSet<InvertedDocEndConsumerPerField>();
+				foreach(DocFieldConsumerPerField field in entry.Value)
 				{
-					DocInverterPerField perField = (DocInverterPerField) ((System.Collections.DictionaryEntry) fieldsIt.Current).Key;
-					childFields[perField.consumer] = perField.consumer;
-					endChildFields[perField.endConsumer] = perField.endConsumer;
+                    DocInverterPerField perField = (DocInverterPerField)field;
+					childFields.Add(perField.consumer);
+					endChildFields.Add(perField.endConsumer);
 				}
 				
 				childThreadsAndFields[perThread.consumer] = childFields;
@@ -79,8 +72,8 @@ namespace Lucene.Net.Index
 			consumer.Flush(childThreadsAndFields, state);
 			endConsumer.Flush(endChildThreadsAndFields, state);
 		}
-		
-		public override void  CloseDocStore(SegmentWriteState state)
+
+	    public override void  CloseDocStore(SegmentWriteState state)
 		{
 			consumer.CloseDocStore(state);
 			endConsumer.CloseDocStore(state);

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs Wed Nov  9 21:03:47 2011
@@ -89,9 +89,9 @@ namespace Lucene.Net.Index
 						// un-tokenized field
 						System.String stringValue = field.StringValue();
 						int valueLength = stringValue.Length;
-						perThread.singleTokenTokenStream.Reinit(stringValue, 0, valueLength);
-						fieldState.attributeSource = perThread.singleTokenTokenStream;
-						consumer.Start(field);
+						perThread.singleToken.Reinit(stringValue, 0, valueLength);
+						fieldState.attributeSource = perThread.singleToken;
+					    consumer.Start(field);
 						
 						bool success = false;
 						try
@@ -144,9 +144,6 @@ namespace Lucene.Net.Index
 						
 						int startLength = fieldState.length;
 						
-						// deprecated
-						bool allowMinus1Position = docState.allowMinus1Position;
-						
 						try
 						{
 							int offsetEnd = fieldState.offset - 1;
@@ -155,8 +152,8 @@ namespace Lucene.Net.Index
 							
 							fieldState.attributeSource = stream;
 							
-							OffsetAttribute offsetAttribute = (OffsetAttribute) fieldState.attributeSource.AddAttribute(typeof(OffsetAttribute));
-							PositionIncrementAttribute posIncrAttribute = (PositionIncrementAttribute) fieldState.attributeSource.AddAttribute(typeof(PositionIncrementAttribute));
+							OffsetAttribute offsetAttribute = fieldState.attributeSource.AddAttribute(typeof(OffsetAttribute));
+							PositionIncrementAttribute posIncrAttribute = fieldState.attributeSource.AddAttribute(typeof(PositionIncrementAttribute));
 							
 							consumer.Start(field);
 							
@@ -175,7 +172,7 @@ namespace Lucene.Net.Index
 								
 								int posIncr = posIncrAttribute.GetPositionIncrement();
 								fieldState.position += posIncr;
-								if (allowMinus1Position || fieldState.position > 0)
+								if (fieldState.position > 0)
 								{
 									fieldState.position--;
 								}

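Note: the deleted allowMinus1Position branch was a deprecated back-compatibility switch (its plumbing through DocumentsWriter and DocumentsWriterThreadState is removed in the hunks below), so the inverter now always keeps positions non-negative. For background, a standalone sketch of how absolute token positions are derived from per-token position increments, independent of the fieldState bookkeeping above:

    using System;

    class PositionIncrementDemo
    {
        static void Main()
        {
            // Increment 1 is the common case, 0 stacks a synonym on the previous
            // position, and values > 1 leave gaps (e.g. for removed stopwords).
            int[] increments = { 1, 0, 2, 1 };
            int position = -1;  // before the first token
            foreach (int incr in increments)
            {
                position += incr;
                Console.WriteLine("token at position " + position);
            }
            // Prints positions 0, 0, 2, 3.
        }
    }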
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs Wed Nov  9 21:03:47 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Util;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
@@ -29,27 +29,26 @@ namespace Lucene.Net.Index
 	/// InvertedTermsConsumer to process those terms. 
 	/// </summary>
 	
-	sealed class DocInverterPerThread:DocFieldConsumerPerThread
+	sealed class DocInverterPerThread : DocFieldConsumerPerThread
 	{
 		private void  InitBlock()
 		{
-			singleTokenTokenStream = new SingleTokenTokenStream();
+			singleToken = new SingleTokenAttributeSource();
 		}
 		internal DocInverter docInverter;
 		internal InvertedDocConsumerPerThread consumer;
 		internal InvertedDocEndConsumerPerThread endConsumer;
-		//TODO: change to SingleTokenTokenStream after Token was removed
-		internal SingleTokenTokenStream singleTokenTokenStream;
+		internal SingleTokenAttributeSource singleToken;
 		
-		internal class SingleTokenTokenStream:TokenStream
+		internal class SingleTokenAttributeSource : AttributeSource
 		{
 			internal TermAttribute termAttribute;
 			internal OffsetAttribute offsetAttribute;
-			
-			internal SingleTokenTokenStream()
+
+            internal SingleTokenAttributeSource()
 			{
-				termAttribute = (TermAttribute) AddAttribute(typeof(TermAttribute));
-				offsetAttribute = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
+				termAttribute = AddAttribute(typeof(TermAttribute));
+				offsetAttribute = AddAttribute(typeof(OffsetAttribute));
 			}
 			
 			public void  Reinit(System.String stringValue, int startOffset, int endOffset)
@@ -57,12 +56,6 @@ namespace Lucene.Net.Index
 				termAttribute.SetTermBuffer(stringValue);
 				offsetAttribute.SetOffset(startOffset, endOffset);
 			}
-			
-			// this is a dummy, to not throw an UOE because this class does not implement any iteration method
-			public override bool IncrementToken()
-			{
-				throw new System.NotSupportedException();
-			}
 		}
 		
 		internal DocumentsWriter.DocState docState;

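Note: the helper for un-tokenized fields changes from a TokenStream subclass to a plain AttributeSource. Since it is only ever Reinit'd and then handed to the consumer as fieldState.attributeSource, it never needed stream iteration, and dropping TokenStream removes the IncrementToken stub that existed only to throw NotSupportedException. A standalone sketch of the reusable single-token holder; TermAttr and OffsetAttr are stand-ins, not the Lucene.Net attribute types:

    using System;

    class SingleTokenDemo
    {
        class TermAttr { public string Term; }
        class OffsetAttr { public int Start, End; }

        // Reused across documents: just a bag of attributes, no token iteration.
        class SingleTokenSource
        {
            public readonly TermAttr Term = new TermAttr();
            public readonly OffsetAttr Offset = new OffsetAttr();

            public void Reinit(string value, int start, int end)
            {
                Term.Term = value;
                Offset.Start = start;
                Offset.End = end;
            }
        }

        static void Main()
        {
            var source = new SingleTokenSource();
            source.Reinit("un-tokenized value", 0, 18);
            Console.WriteLine(source.Term.Term + " [" + source.Offset.Start + "," + source.Offset.End + ")");
        }
    }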
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs Wed Nov  9 21:03:47 2011
@@ -16,7 +16,9 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using Document = Lucene.Net.Documents.Document;
 using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
@@ -69,9 +71,8 @@ namespace Lucene.Net.Index
 	/// call).  Finally the synchronized "finishDocument" is
 	/// called to flush changes to the directory.
 	/// 
-	/// When flush is called by IndexWriter, or, we flush
-	/// internally when autoCommit=false, we forcefully idle all
-	/// threads and flush only once they are all idle.  This
+	/// When flush is called by IndexWriter we forcefully idle 
+	/// all threads and flush only once they are all idle.  This
 	/// means you can call flush with a given thread even while
 	/// other threads are actively adding/deleting documents.
 	/// 
@@ -153,19 +154,19 @@ namespace Lucene.Net.Index
 		internal IndexWriter writer;
 		internal Directory directory;
 		
-		internal System.String segment; // Current segment we are working on
-		private System.String docStoreSegment; // Current doc-store segment we are writing
-		private int docStoreOffset; // Current starting doc-store offset of current segment
-		
-		private int nextDocID; // Next docID to be added
-		private int numDocsInRAM; // # docs buffered in RAM
-		internal int numDocsInStore; // # docs written to doc stores
+		internal System.String segment;             // Current segment we are working on
+		private System.String docStoreSegment;      // Current doc-store segment we are writing
+		private int docStoreOffset;                 // Current starting doc-store offset of current segment
+		
+		private int nextDocID;                      // Next docID to be added
+		private int numDocsInRAM;                   // # docs buffered in RAM
+		internal int numDocsInStore;                // # docs written to doc stores
 		
 		// Max # ThreadState instances; if there are more threads
 		// than this they share ThreadStates
 		private const int MAX_THREAD_STATE = 5;
 		private DocumentsWriterThreadState[] threadStates = new DocumentsWriterThreadState[0];
-		private System.Collections.Hashtable threadBindings = new System.Collections.Hashtable();
+        private SupportClass.HashMap<SupportClass.ThreadClass, DocumentsWriterThreadState> threadBindings = new SupportClass.HashMap<SupportClass.ThreadClass, DocumentsWriterThreadState>();
 		
 		private int pauseThreads; // Non-zero when we need all threads to
 		// pause (eg to flush)
@@ -179,7 +180,7 @@ namespace Lucene.Net.Index
 		internal int maxFieldLength;
 		internal Similarity similarity;
 		
-		internal System.Collections.IList newFiles;
+		internal IList<string> newFiles;
 		
 		internal class DocState
 		{
@@ -192,10 +193,6 @@ namespace Lucene.Net.Index
 			internal Document doc;
 			internal System.String maxTermPrefix;
 			
-			// deprecated
-            [Obsolete]
-			internal bool allowMinus1Position;
-			
 			// Only called by asserts
 			public bool TestPoint(System.String name)
 			{
@@ -400,15 +397,6 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		internal void  SetAllowMinus1Position()
-		{
-			lock (this)
-			{
-				for (int i = 0; i < threadStates.Length; i++)
-					threadStates[i].docState.allowMinus1Position = true;
-			}
-		}
-		
 		/// <summary>Set how much RAM we can use before flushing. </summary>
 		internal void  SetRAMBufferSizeMB(double mb)
 		{
@@ -472,8 +460,7 @@ namespace Lucene.Net.Index
 		}
 		
 		/// <summary>Returns the current doc store segment we are writing
-		/// to.  This will be the same as segment when autoCommit
-		/// * is true. 
+		/// to. 
 		/// </summary>
 		internal System.String GetDocStoreSegment()
 		{
@@ -532,11 +519,11 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		private System.Collections.Generic.ICollection<string> abortedFiles; // List of files that were written before last abort()
+		private ICollection<string> abortedFiles; // List of files that were written before last abort()
 		
 		private SegmentWriteState flushState;
 
-        internal System.Collections.Generic.ICollection<string> AbortedFiles()
+        internal ICollection<string> AbortedFiles()
 		{
 			return abortedFiles;
 		}
@@ -547,28 +534,26 @@ namespace Lucene.Net.Index
 				writer.Message("DW: " + message);
 		}
 
-        internal System.Collections.Generic.IList<string> openFiles = new System.Collections.Generic.List<string>();
-        internal System.Collections.Generic.IList<string> closedFiles = new System.Collections.Generic.List<string>();
+        internal IList<string> openFiles = new List<string>();
+        internal IList<string> closedFiles = new List<string>();
 		
 		/* Returns Collection of files in use by this instance,
 		* including any flushed segments. */
-		internal System.Collections.Generic.IList<string> OpenFiles()
+		internal IList<string> OpenFiles()
 		{
 			lock (this)
 			{
-                string[] tmp = new string[openFiles.Count];
-                openFiles.CopyTo(tmp, 0);
-				return tmp;
+                // ToArray returns a copy
+			    return openFiles.ToArray();
 			}
 		}
 		
-		internal System.Collections.Generic.IList<string> ClosedFiles()
+		internal IList<string> ClosedFiles()
 		{
             lock (this)
             {
-                string[] tmp = new string[closedFiles.Count];
-                closedFiles.CopyTo(tmp, 0);
-                return tmp;
+                // ToArray returns a copy
+                return closedFiles.ToArray();
             }
 		}
 		
@@ -608,13 +593,14 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				
 				try
 				{
-					if (infoStream != null)
-						Message("docWriter: now abort");
-					
-					// Forcefully remove waiting ThreadStates from line
+                    if (infoStream != null)
+                    {
+                        Message("docWriter: now abort");
+                    }
+
+				    // Forcefully remove waiting ThreadStates from line
 					waitQueue.Abort();
 					
 					// Wait for all other threads to finish with
@@ -713,10 +699,11 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Threading.ThreadInterruptedException ie)
 					{
-						// In 3.0 we will change this to throw
-						// InterruptedException instead
-						SupportClass.ThreadClass.Current().Interrupt();
-						throw new System.SystemException(ie.Message, ie);
+                        //// In 3.0 we will change this to throw
+                        //// InterruptedException instead
+                        //SupportClass.ThreadClass.Current().Interrupt();
+                        //throw new System.SystemException(ie.Message, ie);
+					    throw;
 					}
 				}
 				
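Note: the interrupted-exception handling switches from wrapping in System.SystemException to a bare rethrow (the same change recurs twice further down in this file). A standalone sketch of the difference, with InvalidOperationException standing in for ThreadInterruptedException:

    using System;

    class RethrowDemo
    {
        static void Inner()
        {
            throw new InvalidOperationException("interrupted");
        }

        static void Main()
        {
            try
            {
                try
                {
                    Inner();
                }
                catch (InvalidOperationException)
                {
                    // "throw;" re-raises the same exception object, preserving its
                    // type and original stack trace; the old code replaced it with
                    // a new SystemException, changing the type callers must catch.
                    throw;
                }
            }
            catch (InvalidOperationException e)
            {
                Console.WriteLine(e.StackTrace);  // the trace still includes Inner()
            }
        }
    }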
@@ -797,9 +784,9 @@ namespace Lucene.Net.Index
 						flushState.numDocsInStore = 0;
 					}
 					
-					System.Collections.Hashtable threads = new System.Collections.Hashtable();
+					ICollection<DocConsumerPerThread> threads = new HashSet<DocConsumerPerThread>();
 					for (int i = 0; i < threadStates.Length; i++)
-						threads[threadStates[i].consumer] = threadStates[i].consumer;
+						threads.Add(threadStates[i].consumer);
 					consumer.Flush(threads, flushState);
 					
 					if (infoStream != null)
@@ -831,7 +818,7 @@ namespace Lucene.Net.Index
 			}
 		}
 
-        internal System.Collections.ICollection GetFlushedFiles()
+        internal ICollection<string> GetFlushedFiles()
         {
             return flushState.flushedFiles;
         }
@@ -841,10 +828,9 @@ namespace Lucene.Net.Index
 		{
 			
 			CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
-			System.Collections.IEnumerator it = flushState.flushedFiles.GetEnumerator();
-			while (it.MoveNext())
+			foreach(string flushedFile in flushState.flushedFiles)
 			{
-				cfsWriter.AddFile((System.String) ((System.Collections.DictionaryEntry) it.Current).Key);
+                cfsWriter.AddFile(flushedFile);
 			}
 			
 			// Perform the merge
@@ -926,7 +912,7 @@ namespace Lucene.Net.Index
 				// First, find a thread state.  If this thread already
 				// has affinity to a specific ThreadState, use that one
 				// again.
-				DocumentsWriterThreadState state = (DocumentsWriterThreadState) threadBindings[SupportClass.ThreadClass.Current()];
+				DocumentsWriterThreadState state = threadBindings[SupportClass.ThreadClass.Current()];
 				if (state == null)
 				{
 					
@@ -1135,7 +1121,7 @@ namespace Lucene.Net.Index
 		}
 		
 		// for testing
-		internal System.Collections.IDictionary GetBufferedDeleteTerms()
+		internal IDictionary<Term, BufferedDeletes.Num> GetBufferedDeleteTerms()
 		{
 			lock (this)
 			{
@@ -1171,10 +1157,11 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Threading.ThreadInterruptedException ie)
 					{
-						// In 3.0 we will change this to throw
-						// InterruptedException instead
-						SupportClass.ThreadClass.Current().Interrupt();
-						throw new System.SystemException(ie.Message, ie);
+					    throw;
+					    //// In 3.0 we will change this to throw
+					    //// InterruptedException instead
+					    //SupportClass.ThreadClass.Current().Interrupt();
+					    //throw new System.SystemException(ie.Message, ie);
 					}
 				}
 				
@@ -1279,7 +1266,6 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				
 				if (!HasDeletes())
 					return false;
 				
@@ -1334,27 +1320,23 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				
 				int docEnd = docIDStart + reader.MaxDoc();
 				bool any = false;
 				
                 System.Diagnostics.Debug.Assert(CheckDeleteTerm(null));
 
 				// Delete by term
-                //System.Collections.IEnumerator iter = new System.Collections.Hashtable(deletesFlushed.terms).GetEnumerator();
-				System.Collections.IEnumerator iter = deletesFlushed.terms.GetEnumerator();
 				TermDocs docs = reader.TermDocs();
 				try
 				{
-					while (iter.MoveNext())
+					foreach(KeyValuePair<Term, BufferedDeletes.Num> entry in deletesFlushed.terms)
 					{
-						System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iter.Current;
-						Term term = (Term) entry.Key;
+						Term term = entry.Key;
 						// LUCENE-2086: we should be iterating a TreeMap,
                         // here, so terms better be in order:
                         System.Diagnostics.Debug.Assert(CheckDeleteTerm(term));
 						docs.Seek(term);
-						int limit = ((BufferedDeletes.Num) entry.Value).GetNum();
+						int limit = entry.Value.GetNum();
 						while (docs.Next())
 						{
 							int docID = docs.Doc();
@@ -1371,10 +1353,9 @@ namespace Lucene.Net.Index
 				}
 				
 				// Delete by docID
-				iter = deletesFlushed.docIDs.GetEnumerator();
-				while (iter.MoveNext())
+				foreach(int docIdInt in deletesFlushed.docIDs)
 				{
-					int docID = ((System.Int32) iter.Current);
+				    int docID = docIdInt;
 					if (docID >= docIDStart && docID < docEnd)
 					{
 						reader.DeleteDocument(docID - docIDStart);
@@ -1384,12 +1365,10 @@ namespace Lucene.Net.Index
 				
 				// Delete by query
 				IndexSearcher searcher = new IndexSearcher(reader);
-				iter = new System.Collections.Hashtable(deletesFlushed.queries).GetEnumerator();
-				while (iter.MoveNext())
+				foreach(KeyValuePair<Query, int?> entry in deletesFlushed.queries)
 				{
-					System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iter.Current;
 					Query query = (Query) entry.Key;
-					int limit = ((System.Int32) entry.Value);
+					int limit = (int)entry.Value;
 					Weight weight = query.Weight(searcher);
 					Scorer scorer = weight.Scorer(reader, true, false);
 					if (scorer != null)
@@ -1417,7 +1396,7 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				BufferedDeletes.Num num = (BufferedDeletes.Num) deletesInRAM.terms[term];
+				BufferedDeletes.Num num = deletesInRAM.terms[term];
 				int docIDUpto = flushedDocCount + docCount;
 				if (num == null)
 					deletesInRAM.terms[term] = new BufferedDeletes.Num(docIDUpto);
@@ -1435,8 +1414,8 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				deletesInRAM.docIDs.Add((System.Int32) (flushedDocCount + docID));
-				deletesInRAM.AddBytesUsed(BYTES_PER_DEL_DOCID);
+			    deletesInRAM.docIDs.Add(flushedDocCount + docID);
+                deletesInRAM.AddBytesUsed(BYTES_PER_DEL_DOCID);
 			}
 		}
 		
@@ -1444,7 +1423,7 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				deletesInRAM.queries[query] = (System.Int32) (flushedDocCount + docID);
+				deletesInRAM.queries[query] = flushedDocCount + docID;
 				deletesInRAM.AddBytesUsed(BYTES_PER_DEL_QUERY);
 			}
 		}
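Note: the buffered-deletes maps now carry typed values (BufferedDeletes.Num for terms, int? for queries), so the boxing casts on write and the (System.Int32) unboxing on read collapse to plain assignments and a single (int) unwrap in the iteration hunks above. A standalone sketch of that nullable-value map pattern, with a string key standing in for Query:

    using System;
    using System.Collections.Generic;

    class NullableValueMapDemo
    {
        static void Main()
        {
            var queries = new Dictionary<string, int?>();
            queries["body:foo"] = 41 + 1;              // no cast needed to store
            foreach (KeyValuePair<string, int?> entry in queries)
            {
                int limit = (int) entry.Value;         // unwrap the nullable once
                Console.WriteLine(entry.Key + " limit=" + limit);
            }
        }
    }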
@@ -1530,10 +1509,11 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Threading.ThreadInterruptedException ie)
 					{
-						// In 3.0 we will change this to throw
-						// InterruptedException instead
-						SupportClass.ThreadClass.Current().Interrupt();
-						throw new System.SystemException(ie.Message, ie);
+					    throw;
+					    //// In 3.0 we will change this to throw
+					    //// InterruptedException instead
+					    //SupportClass.ThreadClass.Current().Interrupt();
+					    //throw new System.SystemException(ie.Message, ie);
 					}
 				}
 				while (!waitQueue.DoResume());
@@ -1602,7 +1582,7 @@ namespace Lucene.Net.Index
 		internal static readonly int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1;
 		internal static readonly int BYTE_BLOCK_NOT_MASK = ~ BYTE_BLOCK_MASK;
 		
-		internal class ByteBlockAllocator:ByteBlockPool.Allocator
+		internal class ByteBlockAllocator : ByteBlockPool.Allocator
 		{
             public ByteBlockAllocator(DocumentsWriter enclosingInstance, int blockSize)
 			{
@@ -1624,7 +1604,7 @@ namespace Lucene.Net.Index
 			}
 
             int blockSize;
-			internal System.Collections.ArrayList freeByteBlocks = new System.Collections.ArrayList();
+			internal List<byte[]> freeByteBlocks = new List<byte[]>();
             
 			/* Allocate another byte[] from the shared pool */
 			public /*internal*/ override byte[] GetByteBlock(bool trackAllocations)
@@ -1646,10 +1626,8 @@ namespace Lucene.Net.Index
 					}
 					else
 					{
-						System.Object tempObject;
-						tempObject = freeByteBlocks[size - 1];
-						freeByteBlocks.RemoveAt(size - 1);
-						b = (byte[]) tempObject;
+					    b = freeByteBlocks[size - 1];
+					    freeByteBlocks.RemoveAt(size - 1);
 					}
 					if (trackAllocations)
 						Enclosing_Instance.numBytesUsed += blockSize;
@@ -1668,14 +1646,10 @@ namespace Lucene.Net.Index
                         freeByteBlocks.Add(blocks[i]);
                         blocks[i] = null;
                     }
-                    if (enclosingInstance.infoStream != null && blockSize != 1024)
-                    {
-                        enclosingInstance.Message("DW.recycleByteBlocks blockSize=" + blockSize + " count=" + (end - start) + " total now " + freeByteBlocks.Count);
-                    }
 				}
 			}
 
-            public /*internal*/ override void RecycleByteBlocks(System.Collections.ArrayList blocks)
+            public /*internal*/ override void RecycleByteBlocks(IList<byte[]> blocks)
             {
                 lock (Enclosing_Instance)
                 {
@@ -1691,8 +1665,8 @@ namespace Lucene.Net.Index
 		internal const int INT_BLOCK_SHIFT = 13;
 		internal static readonly int INT_BLOCK_SIZE = 1 << INT_BLOCK_SHIFT;
 		internal static readonly int INT_BLOCK_MASK = INT_BLOCK_SIZE - 1;
-		
-		private System.Collections.ArrayList freeIntBlocks = new System.Collections.ArrayList();
+
+        private List<int[]> freeIntBlocks = new List<int[]>();
 		
 		/* Allocate another int[] from the shared pool */
 		internal int[] GetIntBlock(bool trackAllocations)
@@ -1714,10 +1688,8 @@ namespace Lucene.Net.Index
 				}
 				else
 				{
-					System.Object tempObject;
-					tempObject = freeIntBlocks[size - 1];
-					freeIntBlocks.RemoveAt(size - 1);
-					b = (int[]) tempObject;
+				    b = freeIntBlocks[size - 1];
+				    freeIntBlocks.RemoveAt(size - 1);
 				}
 				if (trackAllocations)
 					numBytesUsed += INT_BLOCK_SIZE * INT_NUM_BYTE;
@@ -1753,10 +1725,6 @@ namespace Lucene.Net.Index
                     freeIntBlocks.Add(blocks[i]);
                     blocks[i] = null;
                 }
-                if (infoStream != null)
-                {
-                    Message("DW.recycleIntBlocks count=" + (end - start) + " total now " + freeIntBlocks.Count);
-                }
 			}
 		}
 		
@@ -1774,7 +1742,7 @@ namespace Lucene.Net.Index
 		
 		internal static readonly int MAX_TERM_LENGTH = CHAR_BLOCK_SIZE - 1;
 		
-		private System.Collections.ArrayList freeCharBlocks = new System.Collections.ArrayList();
+		private List<char[]> freeCharBlocks = new List<char[]>();
 		
 		/* Allocate another char[] from the shared pool */
 		internal char[] GetCharBlock()
@@ -1790,10 +1758,8 @@ namespace Lucene.Net.Index
 				}
 				else
 				{
-					System.Object tempObject;
-					tempObject = freeCharBlocks[size - 1];
-					freeCharBlocks.RemoveAt(size - 1);
-					c = (char[]) tempObject;
+				    c = freeCharBlocks[size - 1];
+				    freeCharBlocks.RemoveAt(size - 1);
 				}
 				// We always track allocations of char blocks, for now,
 				// because nothing that skips allocation tracking
@@ -1815,10 +1781,6 @@ namespace Lucene.Net.Index
                     freeCharBlocks.Add(blocks[i]);
                     blocks[i] = null;
                 }
-                if (infoStream != null)
-                {
-                    Message("DW.recycleCharBlocks count=" + numBlocks + " total now " + freeCharBlocks.Count);
-                }
 			}
 		}
 		

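Note: the byte/int/char free-block pools move from ArrayList to typed List<byte[]>, List<int[]> and List<char[]>, so recycled blocks pop off the end of the list without the temporary object and cast. A standalone sketch of that pool pattern; the names and BlockSize are placeholders for illustration:

    using System;
    using System.Collections.Generic;

    class BlockPoolDemo
    {
        const int BlockSize = 1024;
        static readonly List<byte[]> freeByteBlocks = new List<byte[]>();

        static byte[] GetByteBlock()
        {
            int size = freeByteBlocks.Count;
            if (size == 0)
                return new byte[BlockSize];       // pool empty: allocate a fresh block
            byte[] b = freeByteBlocks[size - 1];  // pop from the end: O(1), no element
            freeByteBlocks.RemoveAt(size - 1);    // shifting and no cast from object
            return b;
        }

        static void RecycleByteBlock(byte[] block)
        {
            freeByteBlocks.Add(block);
        }

        static void Main()
        {
            byte[] a = GetByteBlock();
            RecycleByteBlock(a);
            byte[] b = GetByteBlock();
            Console.WriteLine(ReferenceEquals(a, b));  // True: the recycled block was reused
        }
    }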
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriterThreadState.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriterThreadState.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriterThreadState.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriterThreadState.cs Wed Nov  9 21:03:47 2011
@@ -44,7 +44,6 @@ namespace Lucene.Net.Index
 			docState.infoStream = docWriter.infoStream;
 			docState.similarity = docWriter.similarity;
 			docState.docWriter = docWriter;
-			docState.allowMinus1Position = docWriter.writer.GetAllowMinus1Position();
 			consumer = docWriter.consumer.AddThread(this);
 		}
 		

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs Wed Nov  9 21:03:47 2011
@@ -44,16 +44,16 @@ namespace Lucene.Net.Index
 		
 		internal static readonly int CURRENT_FORMAT = FORMAT_START;
 		
-		internal const byte IS_INDEXED = (byte) (0x1);
-		internal const byte STORE_TERMVECTOR = (byte) (0x2);
-		internal const byte STORE_POSITIONS_WITH_TERMVECTOR = (byte) (0x4);
-		internal const byte STORE_OFFSET_WITH_TERMVECTOR = (byte) (0x8);
-		internal const byte OMIT_NORMS = (byte) (0x10);
-		internal const byte STORE_PAYLOADS = (byte) (0x20);
-		internal const byte OMIT_TERM_FREQ_AND_POSITIONS = (byte) (0x40);
-		
-		private System.Collections.ArrayList byNumber = new System.Collections.ArrayList();
-		private System.Collections.Hashtable byName = new System.Collections.Hashtable();
+		internal const byte IS_INDEXED = (0x1);
+		internal const byte STORE_TERMVECTOR = (0x2);
+		internal const byte STORE_POSITIONS_WITH_TERMVECTOR = (0x4);
+		internal const byte STORE_OFFSET_WITH_TERMVECTOR = (0x8);
+		internal const byte OMIT_NORMS = (0x10);
+		internal const byte STORE_PAYLOADS = (0x20);
+		internal const byte OMIT_TERM_FREQ_AND_POSITIONS = (0x40);
+
+        private System.Collections.Generic.List<FieldInfo> byNumber = new System.Collections.Generic.List<FieldInfo>();
+        private SupportClass.HashMap<string, FieldInfo> byName = new SupportClass.HashMap<string, FieldInfo>();
 		private int format;
 		
 		public /*internal*/ FieldInfos()
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
                 int numField = byNumber.Count;
                 for (int i = 0; i < numField; i++)
                 {
-                    FieldInfo fi = (FieldInfo)((FieldInfo)byNumber[i]).Clone();
+                    FieldInfo fi = (FieldInfo)byNumber[i].Clone();
                     fis.byNumber.Add(fi);
                     fis.byName[fi.name] = fi;
                 }
@@ -134,13 +134,13 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				System.Collections.IList fields = doc.GetFields();
-				System.Collections.IEnumerator fieldIterator = fields.GetEnumerator();
-				while (fieldIterator.MoveNext())
-				{
-					Fieldable field = (Fieldable) fieldIterator.Current;
-					Add(field.Name(), field.IsIndexed(), field.IsTermVectorStored(), field.IsStorePositionWithTermVector(), field.IsStoreOffsetWithTermVector(), field.GetOmitNorms(), false, field.GetOmitTf());
-				}
+				System.Collections.Generic.IList<Fieldable> fields = doc.GetFields();
+                foreach(Fieldable field in fields)
+                {
+                    Add(field.Name(), field.IsIndexed(), field.IsTermVectorStored(),
+                        field.IsStorePositionWithTermVector(), field.IsStoreOffsetWithTermVector(), field.GetOmitNorms(),
+                        false, field.GetOmitTermFreqAndPositions());
+                }
 			}
 		}
 		
@@ -170,14 +170,13 @@ namespace Lucene.Net.Index
 		/// </param>
 		/// <param name="storeOffsetWithTermVector">true if offsets should be stored
 		/// </param>
-		public void  AddIndexed(System.Collections.ICollection names, bool storeTermVectors, bool storePositionWithTermVector, bool storeOffsetWithTermVector)
+		public void  AddIndexed(System.Collections.Generic.ICollection<string> names, bool storeTermVectors, bool storePositionWithTermVector, bool storeOffsetWithTermVector)
 		{
 			lock (this)
 			{
-				System.Collections.IEnumerator i = names.GetEnumerator();
-				while (i.MoveNext())
+				foreach(string name in names)
 				{
-					Add((System.String) i.Current, true, storeTermVectors, storePositionWithTermVector, storeOffsetWithTermVector);
+					Add(name, true, storeTermVectors, storePositionWithTermVector, storeOffsetWithTermVector);
 				}
 			}
 		}
@@ -196,10 +195,9 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				System.Collections.IEnumerator i = names.GetEnumerator();
-				while (i.MoveNext())
+				foreach(string name in names)
 				{
-					Add((System.String) i.Current, isIndexed);
+					Add(name, isIndexed);
 				}
 			}
 		}
@@ -345,7 +343,7 @@ namespace Lucene.Net.Index
 		
 		public FieldInfo FieldInfo(System.String fieldName)
 		{
-			return (FieldInfo) byName[fieldName];
+			return byName[fieldName];
 		}
 		
 		/// <summary> Return the fieldName identified by its number.
@@ -358,8 +356,8 @@ namespace Lucene.Net.Index
 		/// </returns>
 		public System.String FieldName(int fieldNumber)
 		{
-			FieldInfo fi = FieldInfo(fieldNumber);
-			return (fi != null)?fi.name:"";
+		    FieldInfo fi = FieldInfo(fieldNumber);
+		    return (fi != null) ? fi.name : "";
 		}
 		
 		/// <summary> Return the fieldinfo object referenced by the fieldNumber.</summary>
@@ -370,7 +368,7 @@ namespace Lucene.Net.Index
 		/// </returns>
 		public FieldInfo FieldInfo(int fieldNumber)
 		{
-			return (fieldNumber >= 0)?(FieldInfo) byNumber[fieldNumber]:null;
+		    return (fieldNumber >= 0) ? byNumber[fieldNumber] : null;
 		}
 		
 		public int Size()

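Note: the flag constants keep their values; only the redundant (byte) casts go away, and byNumber/byName become generic so FieldInfo lookups need no casts. For reference, a standalone sketch of composing and testing such per-field flag bytes (the three constant values are copied from the hunk; the composition itself is illustrative):

    using System;

    class FieldBitsDemo
    {
        const byte IS_INDEXED = 0x1;
        const byte STORE_TERMVECTOR = 0x2;
        const byte OMIT_NORMS = 0x10;

        static void Main()
        {
            byte bits = IS_INDEXED | OMIT_NORMS;                // 0x11, folded at compile time
            Console.WriteLine((bits & IS_INDEXED) != 0);        // True
            Console.WriteLine((bits & STORE_TERMVECTOR) != 0);  // False
            Console.WriteLine((bits & OMIT_NORMS) != 0);        // True
        }
    }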
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsReader.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsReader.cs Wed Nov  9 21:03:47 2011
@@ -16,14 +16,13 @@
  */
 
 using System;
-
+using Lucene.Net.Util;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using Lucene.Net.Documents;
 using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
 using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
 using Directory = Lucene.Net.Store.Directory;
 using IndexInput = Lucene.Net.Store.IndexInput;
-using CloseableThreadLocal = Lucene.Net.Util.CloseableThreadLocal;
 using StringHelper = Lucene.Net.Util.StringHelper;
 
 namespace Lucene.Net.Index
@@ -34,8 +33,6 @@ namespace Lucene.Net.Index
 	/// It uses &lt;segment&gt;.fdt and &lt;segment&gt;.fdx; files.
 	/// 
 	/// </summary>
-	/// <version>  $Id: FieldsReader.java 801344 2009-08-05 18:05:06Z yonik $
-	/// </version>
 	public sealed class FieldsReader : System.ICloneable
 	{
 		private FieldInfos fieldInfos;
@@ -59,7 +56,7 @@ namespace Lucene.Net.Index
 		// file.  This will be 0 if we have our own private file.
 		private int docStoreOffset;
 		
-		private CloseableThreadLocal fieldsStreamTL = new CloseableThreadLocal();
+		private CloseableThreadLocal<IndexInput> fieldsStreamTL = new CloseableThreadLocal<IndexInput>();
 		private bool isOriginal = false;
 		
 		/// <summary>Returns a cloned FieldsReader that shares open
@@ -117,9 +114,7 @@ namespace Lucene.Net.Index
 				else
 					format = firstInt;
 				
-				if (format > FieldsWriter.FORMAT_CURRENT
-                    /* extra support for Lucene 3.0 indexes: */ && format != FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS
-                    )
+				if (format > FieldsWriter.FORMAT_CURRENT)
 					throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
 				
 				if (format > FieldsWriter.FORMAT)
@@ -221,8 +216,12 @@ namespace Lucene.Net.Index
 		}
 		
 		internal bool CanReadRawDocs()
-		{
-			return format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;
+        {
+            // Disable reading raw docs in 2.x format, because of the removal of compressed
+            // fields in 3.0. We don't want rawDocs() to decode field bits to figure out
+            // if a field was compressed, hence we enforce ordinary (non-raw) stored field merges
+            // for <3.0 indexes.
+			return format >= FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS;
 		}
 		
 		public /*internal*/ Document Doc(int n, FieldSelector fieldSelector)
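Note: CanReadRawDocs now gates raw (bulk) stored-field copying on the 3.0 file format, because pre-3.0 segments may still contain compressed fields that have to be decoded field-by-field during merges. A standalone sketch of that version gate; the numeric constants here are invented for illustration, the real ones live in FieldsWriter:

    using System;

    class FormatGateDemo
    {
        // Placeholder values; only their ordering matters for the check.
        const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = 2;
        const int FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS = 3;

        static bool CanReadRawDocs(int format)
        {
            // Bulk copies are only safe when no stored field can be compressed,
            // i.e. for segments written by 3.0 or later.
            return format >= FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS;
        }

        static void Main()
        {
            Console.WriteLine(CanReadRawDocs(2));  // False: 2.x segment, merge field-by-field
            Console.WriteLine(CanReadRawDocs(3));  // True
        }
    }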
@@ -243,6 +242,9 @@ namespace Lucene.Net.Index
 				System.Diagnostics.Debug.Assert(bits <= FieldsWriter.FIELD_IS_COMPRESSED + FieldsWriter.FIELD_IS_TOKENIZED + FieldsWriter.FIELD_IS_BINARY);
 				
 				bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
+			    System.Diagnostics.Debug.Assert(
+			        (compressed ? (format < FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS) : true),
+			        "compressed fields are only allowed in indexes of version <= 2.9");
 				bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
 				bool binary = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;
 				//TODO: Find an alternative approach here if this list continues to grow beyond the
@@ -251,10 +253,6 @@ namespace Lucene.Net.Index
 				{
 					AddField(doc, fi, binary, compressed, tokenize);
 				}
-				else if (acceptField.Equals(FieldSelectorResult.LOAD_FOR_MERGE))
-				{
-					AddFieldForMerge(doc, fi, binary, compressed, tokenize);
-				}
 				else if (acceptField.Equals(FieldSelectorResult.LOAD_AND_BREAK))
 				{
 					AddField(doc, fi, binary, compressed, tokenize);
@@ -338,32 +336,24 @@ namespace Lucene.Net.Index
 			{
 				int toRead = fieldsStream.ReadVInt();
 				long pointer = fieldsStream.GetFilePointer();
-				if (compressed)
-				{
-					//was: doc.add(new Fieldable(fi.name, uncompress(b), Fieldable.Store.COMPRESS));
-					doc.Add(new LazyField(this, fi.name, Field.Store.COMPRESS, toRead, pointer, binary));
-				}
-				else
-				{
-					//was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
-					doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary));
-				}
+				//was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
+				doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));
+
 				//Need to move the pointer ahead by toRead positions
 				fieldsStream.Seek(pointer + toRead);
 			}
 			else
 			{
 				Field.Store store = Field.Store.YES;
-				Field.Index index = GetIndexType(fi, tokenize);
-				Field.TermVector termVector = GetTermVectorType(fi);
+				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
+				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
 				
 				AbstractField f;
 				if (compressed)
 				{
-					store = Field.Store.COMPRESS;
 					int toRead = fieldsStream.ReadVInt();
 					long pointer = fieldsStream.GetFilePointer();
-					f = new LazyField(this, fi.name, store, toRead, pointer, binary);
+					f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
 					//skip over the part that we aren't loading
 					fieldsStream.Seek(pointer + toRead);
 					f.SetOmitNorms(fi.omitNorms);
@@ -374,62 +364,49 @@ namespace Lucene.Net.Index
 					int length = fieldsStream.ReadVInt();
 					long pointer = fieldsStream.GetFilePointer();
 					//Skip ahead of where we are by the length of what is stored
-					if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
-						fieldsStream.Seek(pointer + length);
-					else
-						fieldsStream.SkipChars(length);
-					f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary);
+                    if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
+                    {
+                        fieldsStream.Seek(pointer + length);
+                    }
+                    else
+                    {
+                        fieldsStream.SkipChars(length);
+                    }
+				    f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed);
 					f.SetOmitNorms(fi.omitNorms);
 					f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
 				}
+
 				doc.Add(f);
 			}
 		}
-		
-		// in merge mode we don't uncompress the data of a compressed field
-		private void  AddFieldForMerge(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
-		{
-			System.Object data;
-			
-			if (binary || compressed)
-			{
-				int toRead = fieldsStream.ReadVInt();
-				byte[] b = new byte[toRead];
-				fieldsStream.ReadBytes(b, 0, b.Length);
-				data = b;
-			}
-			else
-			{
-				data = fieldsStream.ReadString();
-			}
-			
-			doc.Add(new FieldForMerge(data, fi, binary, compressed, tokenize));
-		}
-		
-		private void  AddField(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
+
+		private void AddField(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
 		{
-			
 			//we have a binary stored field, and it may be compressed
 			if (binary)
 			{
 				int toRead = fieldsStream.ReadVInt();
 				byte[] b = new byte[toRead];
 				fieldsStream.ReadBytes(b, 0, b.Length);
-				if (compressed)
-					doc.Add(new Field(fi.name, Uncompress(b), Field.Store.COMPRESS));
-				else
-					doc.Add(new Field(fi.name, b, Field.Store.YES));
+                if (compressed)
+                {
+                    doc.Add(new Field(fi.name, Uncompress(b), Field.Store.YES));
+                }
+                else
+                {
+                    doc.Add(new Field(fi.name, b, Field.Store.YES));
+                }
 			}
 			else
 			{
 				Field.Store store = Field.Store.YES;
-				Field.Index index = GetIndexType(fi, tokenize);
-				Field.TermVector termVector = GetTermVectorType(fi);
+				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
+				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
 				
 				AbstractField f;
 				if (compressed)
 				{
-					store = Field.Store.COMPRESS;
 					int toRead = fieldsStream.ReadVInt();
 					
 					byte[] b = new byte[toRead];
@@ -444,6 +421,7 @@ namespace Lucene.Net.Index
 					f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
 					f.SetOmitNorms(fi.omitNorms);
 				}
+
 				doc.Add(f);
 			}
 		}
@@ -463,55 +441,11 @@ namespace Lucene.Net.Index
 			return size;
 		}
 		
-		private Field.TermVector GetTermVectorType(FieldInfo fi)
-		{
-			Field.TermVector termVector = null;
-			if (fi.storeTermVector)
-			{
-				if (fi.storeOffsetWithTermVector)
-				{
-					if (fi.storePositionWithTermVector)
-					{
-						termVector = Field.TermVector.WITH_POSITIONS_OFFSETS;
-					}
-					else
-					{
-						termVector = Field.TermVector.WITH_OFFSETS;
-					}
-				}
-				else if (fi.storePositionWithTermVector)
-				{
-					termVector = Field.TermVector.WITH_POSITIONS;
-				}
-				else
-				{
-					termVector = Field.TermVector.YES;
-				}
-			}
-			else
-			{
-				termVector = Field.TermVector.NO;
-			}
-			return termVector;
-		}
-		
-		private Field.Index GetIndexType(FieldInfo fi, bool tokenize)
-		{
-			Field.Index index;
-			if (fi.isIndexed && tokenize)
-				index = Field.Index.ANALYZED;
-			else if (fi.isIndexed && !tokenize)
-				index = Field.Index.NOT_ANALYZED;
-			else
-				index = Field.Index.NO;
-			return index;
-		}
-		
 		/// <summary> A Lazy implementation of Fieldable that differs loading of fields until asked for, instead of when the Document is
 		/// loaded.
 		/// </summary>
 		[Serializable]
-		private class LazyField:AbstractField, Fieldable
+		private class LazyField : AbstractField, Fieldable
 		{
 			private void  InitBlock(FieldsReader enclosingInstance)
 			{
@@ -528,8 +462,10 @@ namespace Lucene.Net.Index
 			}
 			private int toRead;
 			private long pointer;
+            [Obsolete("Only kept for backward-compatbility with <3.0 indexes. Will be removed in 4.0.")]
+            [Obsolete("Only kept for backward-compatibility with <3.0 indexes. Will be removed in 4.0.")]
 			
-			public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, int toRead, long pointer, bool isBinary):base(name, store, Field.Index.NO, Field.TermVector.NO)
+			public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, int toRead, long pointer, bool isBinary, bool isCompressed):base(name, store, Field.Index.NO, Field.TermVector.NO)
 			{
 				InitBlock(enclosingInstance);
 				this.toRead = toRead;
@@ -538,9 +474,10 @@ namespace Lucene.Net.Index
 				if (isBinary)
 					binaryLength = toRead;
 				lazy = true;
+			    this.isCompressed = isCompressed;
 			}
 			
-			public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, Field.Index index, Field.TermVector termVector, int toRead, long pointer, bool isBinary):base(name, store, index, termVector)
+			public LazyField(FieldsReader enclosingInstance, System.String name, Field.Store store, Field.Index index, Field.TermVector termVector, int toRead, long pointer, bool isBinary, bool isCompressed):base(name, store, index, termVector)
 			{
 				InitBlock(enclosingInstance);
 				this.toRead = toRead;
@@ -549,11 +486,12 @@ namespace Lucene.Net.Index
 				if (isBinary)
 					binaryLength = toRead;
 				lazy = true;
+			    this.isCompressed = isCompressed;
 			}
 			
 			private IndexInput GetFieldStream()
 			{
-				IndexInput localFieldsStream = (IndexInput) Enclosing_Instance.fieldsStreamTL.Get();
+				IndexInput localFieldsStream = Enclosing_Instance.fieldsStreamTL.Get();
 				if (localFieldsStream == null)
 				{
 					localFieldsStream = (IndexInput) Enclosing_Instance.cloneableFieldsStream.Clone();
@@ -562,18 +500,9 @@ namespace Lucene.Net.Index
 				return localFieldsStream;
 			}
 			
-			/// <summary>The value of the field in Binary, or null.  If null, the Reader value,
-			/// String value, or TokenStream value is used. Exactly one of stringValue(), 
-			/// readerValue(), binaryValue(), and tokenStreamValue() must be set. 
-			/// </summary>
-			public override byte[] BinaryValue()
-			{
-				return GetBinaryValue(null);
-			}
-			
 			/// <summary>The value of the field as a Reader, or null.  If null, the String value,
-			/// binary value, or TokenStream value is used.  Exactly one of stringValue(), 
-			/// readerValue(), binaryValue(), and tokenStreamValue() must be set. 
+			/// binary value, or TokenStream value is used.  Exactly one of StringValue(), 
+			/// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
 			/// </summary>
 			public override System.IO.TextReader ReaderValue()
 			{
@@ -582,8 +511,8 @@ namespace Lucene.Net.Index
 			}
 			
 			/// <summary>The value of the field as a TokenStream, or null.  If null, the Reader value,
-			/// String value, or binary value is used. Exactly one of stringValue(), 
-			/// readerValue(), binaryValue(), and tokenStreamValue() must be set. 
+            /// String value, or binary value is used. Exactly one of StringValue(), 
+            /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
 			/// </summary>
 			public override TokenStream TokenStreamValue()
 			{
@@ -592,8 +521,8 @@ namespace Lucene.Net.Index
 			}
 			
 			/// <summary>The value of the field as a String, or null.  If null, the Reader value,
-			/// binary value, or TokenStream value is used.  Exactly one of stringValue(), 
-			/// readerValue(), binaryValue(), and tokenStreamValue() must be set. 
+            /// binary value, or TokenStream value is used.  Exactly one of StringValue(), 
+            /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. 
 			/// </summary>
 			public override System.String StringValue()
 			{
@@ -725,53 +654,5 @@ namespace Lucene.Net.Index
 				throw newException;
 			}
 		}
-		
-		// Instances of this class hold field properties and data
-		// for merge
-		[Serializable]
-		internal sealed class FieldForMerge:AbstractField
-		{
-			public override System.String StringValue()
-			{
-				return (System.String) this.fieldsData;
-			}
-			
-			public override System.IO.TextReader ReaderValue()
-			{
-				// not needed for merge
-				return null;
-			}
-			
-			public override byte[] BinaryValue()
-			{
-				return (byte[]) this.fieldsData;
-			}
-			
-			public override TokenStream TokenStreamValue()
-			{
-				// not needed for merge
-				return null;
-			}
-			
-			public FieldForMerge(System.Object value_Renamed, FieldInfo fi, bool binary, bool compressed, bool tokenize)
-			{
-				this.isStored = true;
-				this.fieldsData = value_Renamed;
-				this.isCompressed = compressed;
-				this.isBinary = binary;
-				if (binary)
-					binaryLength = ((byte[]) value_Renamed).Length;
-				
-				this.isTokenized = tokenize;
-				
-				this.name = StringHelper.Intern(fi.name);
-				this.isIndexed = fi.isIndexed;
-				this.omitNorms = fi.omitNorms;
-				this.omitTermFreqAndPositions = fi.omitTermFreqAndPositions;
-				this.storeOffsetWithTermVector = fi.storeOffsetWithTermVector;
-				this.storePositionWithTermVector = fi.storePositionWithTermVector;
-				this.storeTermVector = fi.storeTermVector;
-			}
-		}
 	}
 }
\ No newline at end of file

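For reference, the two helpers deleted above (GetIndexType and
GetTermVectorType) are replaced by calls to FieldExtensions.ToIndex and
FieldExtensions.ToTermVector earlier in this file. A minimal sketch that is
consistent with the deleted bodies follows; the exact shape of the
FieldExtensions class is an assumption here, since only its call sites appear
in this diff:

    using Lucene.Net.Documents;

    // Sketch only: mirrors the deleted GetIndexType/GetTermVectorType logic.
    public static class FieldExtensions
    {
        public static Field.Index ToIndex(bool indexed, bool analyzed)
        {
            if (indexed && analyzed)
                return Field.Index.ANALYZED;      // indexed and tokenized
            if (indexed)
                return Field.Index.NOT_ANALYZED;  // indexed as a single token
            return Field.Index.NO;                // not indexed
        }

        public static Field.TermVector ToTermVector(bool stored, bool withOffsets, bool withPositions)
        {
            if (!stored)
                return Field.TermVector.NO;
            if (withOffsets)
                return withPositions ? Field.TermVector.WITH_POSITIONS_OFFSETS
                                     : Field.TermVector.WITH_OFFSETS;
            return withPositions ? Field.TermVector.WITH_POSITIONS
                                 : Field.TermVector.YES;
        }
    }
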
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsWriter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldsWriter.cs Wed Nov  9 21:03:47 2011
@@ -30,9 +30,10 @@ namespace Lucene.Net.Index
 	
 	sealed class FieldsWriter
 	{
-		internal const byte FIELD_IS_TOKENIZED = (byte) (0x1);
-		internal const byte FIELD_IS_BINARY = (byte) (0x2);
-		internal const byte FIELD_IS_COMPRESSED = (byte) (0x4);
+		internal const byte FIELD_IS_TOKENIZED = 0x1;
+		internal const byte FIELD_IS_BINARY = 0x2;
+        [Obsolete("Kept for backwards-compatibility with <3.0 indexes; will be removed in 4.0")]
+		internal const byte FIELD_IS_COMPRESSED = 0x4;
 		
 		// Original format
 		internal const int FORMAT = 0;
@@ -40,16 +41,13 @@ namespace Lucene.Net.Index
 		// Changed strings to UTF8
 		internal const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = 1;
                  
-        // Lucene 3.0: Removal of compressed fields: This is only to provide compatibility with 3.0-created indexes
-        // new segments always use the FORMAT_CURRENT. As the index format did not change in 3.0, only
-        // new stored field files that no longer support compression are marked as such to optimize merging.
-        // But 2.9 can still read them.
+        // Lucene 3.0: Removal of compressed fields
         internal static int FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS = 2;
 		
 		// NOTE: if you introduce a new format, make it 1 higher
 		// than the current one, and always change this if you
 		// switch to a new format!
-		internal static readonly int FORMAT_CURRENT = FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;
+        internal static readonly int FORMAT_CURRENT = FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS;
 		
 		private FieldInfos fieldInfos;
 		
@@ -231,74 +229,31 @@ namespace Lucene.Net.Index
 		
 		internal void  WriteField(FieldInfo fi, Fieldable field)
 		{
-			// if the field as an instanceof FieldsReader.FieldForMerge, we're in merge mode
-			// and field.binaryValue() already returns the compressed value for a field
-			// with isCompressed()==true, so we disable compression in that case
-			bool disableCompression = (field is FieldsReader.FieldForMerge);
 			fieldsStream.WriteVInt(fi.number);
 			byte bits = 0;
 			if (field.IsTokenized())
 				bits |= FieldsWriter.FIELD_IS_TOKENIZED;
 			if (field.IsBinary())
 				bits |= FieldsWriter.FIELD_IS_BINARY;
-			if (field.IsCompressed())
-				bits |= FieldsWriter.FIELD_IS_COMPRESSED;
 			
 			fieldsStream.WriteByte(bits);
 			
-			if (field.IsCompressed())
+			// compression was removed in 3.0; stored fields are always written uncompressed
+			if (field.IsBinary())
 			{
-				// compression is enabled for the current field
 				byte[] data;
 				int len;
 				int offset;
-				if (disableCompression)
-				{
-					// optimized case for merging, the data
-					// is already compressed
-					data = field.GetBinaryValue();
-					System.Diagnostics.Debug.Assert(data != null);
-					len = field.GetBinaryLength();
-					offset = field.GetBinaryOffset();
-				}
-				else
-				{
-					// check if it is a binary field
-					if (field.IsBinary())
-					{
-						data = CompressionTools.Compress(field.GetBinaryValue(), field.GetBinaryOffset(), field.GetBinaryLength());
-					}
-					else
-					{
-						byte[] x = System.Text.Encoding.GetEncoding("UTF-8").GetBytes(field.StringValue());
-						data = CompressionTools.Compress(x, 0, x.Length);
-					}
-					len = data.Length;
-					offset = 0;
-				}
-				
+				data = field.GetBinaryValue();
+				len = field.GetBinaryLength();
+				offset = field.GetBinaryOffset();
+					
 				fieldsStream.WriteVInt(len);
 				fieldsStream.WriteBytes(data, offset, len);
 			}
 			else
 			{
-				// compression is disabled for the current field
-				if (field.IsBinary())
-				{
-					byte[] data;
-					int len;
-					int offset;
-					data = field.GetBinaryValue();
-					len = field.GetBinaryLength();
-					offset = field.GetBinaryOffset();
-					
-					fieldsStream.WriteVInt(len);
-					fieldsStream.WriteBytes(data, offset, len);
-				}
-				else
-				{
-					fieldsStream.WriteString(field.StringValue());
-				}
+				fieldsStream.WriteString(field.StringValue());
 			}
 		}
 		
@@ -326,19 +281,16 @@ namespace Lucene.Net.Index
 			indexStream.WriteLong(fieldsStream.GetFilePointer());
 			
 			int storedCount = 0;
-			System.Collections.IEnumerator fieldIterator = doc.GetFields().GetEnumerator();
-			while (fieldIterator.MoveNext())
+		    System.Collections.Generic.IList<Fieldable> fields = doc.GetFields();
+			foreach(Fieldable field in fields)
 			{
-				Fieldable field = (Fieldable) fieldIterator.Current;
 				if (field.IsStored())
 					storedCount++;
 			}
 			fieldsStream.WriteVInt(storedCount);
 			
-			fieldIterator = doc.GetFields().GetEnumerator();
-			while (fieldIterator.MoveNext())
+			foreach(Fieldable field in fields)
 			{
-				Fieldable field = (Fieldable) fieldIterator.Current;
 				if (field.IsStored())
 					WriteField(fieldInfos.FieldInfo(field.Name()), field);
 			}

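The flag byte written by WriteField above now carries only two bits on the
write path; FIELD_IS_COMPRESSED is still tested when reading segments written
before 3.0. A hedged round-trip sketch (the helper methods below are
illustrative, not members that exist in FieldsWriter):

    using Lucene.Net.Store;

    // Write path, as in WriteField above:
    static void WriteFieldFlags(IndexOutput fieldsStream, bool isTokenized, bool isBinary)
    {
        byte bits = 0;
        if (isTokenized)
            bits |= FieldsWriter.FIELD_IS_TOKENIZED; // 0x1
        if (isBinary)
            bits |= FieldsWriter.FIELD_IS_BINARY;    // 0x2
        fieldsStream.WriteByte(bits);
    }

    // Read path (FieldsReader), decoding the same byte:
    static void ReadFieldFlags(IndexInput fieldsStream, out bool tokenize, out bool binary, out bool compressed)
    {
        byte bits = fieldsStream.ReadByte();
        tokenize   = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
        binary     = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;
        compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0; // pre-3.0 segments only
    }
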
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FilterIndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FilterIndexReader.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FilterIndexReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FilterIndexReader.cs Wed Nov  9 21:03:47 2011
@@ -281,14 +281,6 @@ namespace Lucene.Net.Index
 		{
 			in_Renamed.DeleteDocument(n);
 		}
-		
-		/// <deprecated> 
-		/// </deprecated>
-        [Obsolete]
-		protected internal override void  DoCommit()
-		{
-			DoCommit(null);
-		}
 
         protected internal override void DoCommit(System.Collections.Generic.IDictionary<string, string> commitUserData)
 		{

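With the parameterless DoCommit() overload removed, FilterIndexReader
subclasses override only the commitUserData variant. A hypothetical subclass
(the class and its bookkeeping are illustrative, not part of this commit):

    using System.Collections.Generic;
    using Lucene.Net.Index;

    class AuditingFilterReader : FilterIndexReader
    {
        public AuditingFilterReader(IndexReader reader) : base(reader) { }

        protected internal override void DoCommit(IDictionary<string, string> commitUserData)
        {
            // custom pre-commit bookkeeping would go here; passing null
            // commitUserData matches what the deleted overload did
            base.DoCommit(commitUserData);
        }
    }
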
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsDocsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsDocsWriter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsDocsWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsDocsWriter.cs Wed Nov  9 21:03:47 2011
@@ -43,7 +43,7 @@ namespace Lucene.Net.Index
 		{
 			this.parent = parent;
 			System.String fileName = IndexFileNames.SegmentFileName(parent.parent.segment, IndexFileNames.FREQ_EXTENSION);
-			SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, fileName);
+			state.flushedFiles.Add(fileName);
 			out_Renamed = parent.parent.dir.CreateOutput(fileName);
 			totalNumDocs = parent.parent.totalNumDocs;
 			

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsFieldsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsFieldsWriter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsFieldsWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsFieldsWriter.cs Wed Nov  9 21:03:47 2011
@@ -48,8 +48,8 @@ namespace Lucene.Net.Index
 			// can provide skip data or not
 			skipListWriter = new DefaultSkipListWriter(termsOut.skipInterval, termsOut.maxSkipLevels, totalNumDocs, null, null);
 			
-			SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, state.SegmentFileName(IndexFileNames.TERMS_EXTENSION));
-			SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, state.SegmentFileName(IndexFileNames.TERMS_INDEX_EXTENSION));
+			state.flushedFiles.Add(state.SegmentFileName(IndexFileNames.TERMS_EXTENSION));
+			state.flushedFiles.Add(state.SegmentFileName(IndexFileNames.TERMS_INDEX_EXTENSION));
 			
 			termsWriter = new FormatPostingsTermsWriter(state, this);
 		}

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsPositionsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsPositionsWriter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsPositionsWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FormatPostingsPositionsWriter.cs Wed Nov  9 21:03:47 2011
@@ -42,7 +42,7 @@ namespace Lucene.Net.Index
 				// At least one field does not omit TF, so create the
 				// prox file
 				System.String fileName = IndexFileNames.SegmentFileName(parent.parent.parent.segment, IndexFileNames.PROX_EXTENSION);
-				SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, fileName);
+				state.flushedFiles.Add(fileName);
 				out_Renamed = parent.parent.parent.dir.CreateOutput(fileName);
 				parent.skipListWriter.SetProxOutput(out_Renamed);
 			}

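The AddIfNotContains -> Add simplification in the three FormatPostings*
writers above implies that SegmentWriteState.flushedFiles is now backed by a
set, so a plain Add() is already duplicate-safe. The backing type is an
assumption; a minimal illustration:

    using System.Collections.Generic;

    // Assuming flushedFiles is set-backed (e.g. HashSet<string>):
    ICollection<string> flushedFiles = new HashSet<string>();
    flushedFiles.Add("_0.frq");
    flushedFiles.Add("_0.frq"); // no-op on a set; still exactly one entry
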
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexFileDeleter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexFileDeleter.cs?rev=1199962&r1=1199961&r2=1199962&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexFileDeleter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexFileDeleter.cs Wed Nov  9 21:03:47 2011
@@ -16,7 +16,7 @@
 */
 
 using System;
-
+using System.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Index
@@ -31,13 +31,6 @@ namespace Lucene.Net.Index
     /// not yet committed.  This class uses simple reference
     /// counting to map the live SegmentInfos instances to
     /// individual files in the Directory.</para>
-    ///
-    /// <para>When autoCommit=true, IndexWriter currently commits only
-    /// on completion of a merge (though this may change with
-    /// time: it is not a guarantee).  When autoCommit=false,
-    /// IndexWriter only commits when it is closed.  Regardless
-    /// of autoCommit, the user may call IndexWriter.commit() to
-    /// force a blocking commit.</para>
     /// 
     /// <para>The same directory file may be referenced by more than
     /// one IndexCommit, i.e. more than one SegmentInfos.
@@ -72,26 +65,26 @@ namespace Lucene.Net.Index
 		//// Files that we tried to delete but failed (likely
 		/// because they are open and we are running on Windows),
 		/// so we will retry them again later: ////
-		private System.Collections.Generic.IList<string> deletable;
+		private IList<string> deletable;
 		
 		//// Reference count for all files in the index.  
 		/// Counts how many existing commits reference a file.
 		/// Maps String to RefCount (class below) instances: ////
-		private System.Collections.Generic.Dictionary<System.String, RefCount> refCounts = new System.Collections.Generic.Dictionary<System.String, RefCount>();
+		private IDictionary<string, RefCount> refCounts = new SupportClass.HashMap<string, RefCount>();
 		
 		//// Holds all commits (segments_N) currently in the index.
 		/// This will have just 1 commit if you are using the
 		/// default delete policy (KeepOnlyLastCommitDeletionPolicy).
 		/// Other policies may leave commit points live for longer
 		/// in which case this list would be longer than 1: ////
-		private System.Collections.ArrayList commits = new System.Collections.ArrayList();
+        private List<CommitPoint> commits = new List<CommitPoint>();
 		
 		//// Holds files we had incref'd from the previous
 		/// non-commit checkpoint: ////
-        private System.Collections.Generic.IList<string> lastFiles = new System.Collections.Generic.List<string>();
+        private List<ICollection<string>> lastFiles = new List<ICollection<string>>();
 		
 		//// Commits that the IndexDeletionPolicy have decided to delete: ////
-		private System.Collections.ArrayList commitsToDelete = new System.Collections.ArrayList();
+        private List<CommitPoint> commitsToDelete = new List<CommitPoint>();
 		
 		private System.IO.StreamWriter infoStream;
 		private Directory directory;
@@ -101,7 +94,7 @@ namespace Lucene.Net.Index
 		internal bool startingCommitDeleted;
         private SegmentInfos lastSegmentInfos;
 
-        private System.Collections.Generic.Dictionary<string, string> synced;
+        private HashSet<string> synced;
 		
 		/// <summary>Change to true to see details of reference counts when
 		/// infoStream != null 
@@ -129,7 +122,7 @@ namespace Lucene.Net.Index
 		/// </summary>
 		/// <throws>  CorruptIndexException if the index is corrupt </throws>
 		/// <throws>  IOException if there is a low-level IO error </throws>
-        public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, System.IO.StreamWriter infoStream, DocumentsWriter docWriter, System.Collections.Generic.Dictionary<string, string> synced)
+        public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, System.IO.StreamWriter infoStream, DocumentsWriter docWriter, HashSet<string> synced)
 		{
 			
 			this.docWriter = docWriter;
@@ -210,7 +203,7 @@ namespace Lucene.Net.Index
                         }
                         if (sis != null)
                         {
-                            CommitPoint commitPoint = new CommitPoint(this,commitsToDelete, directory, sis);
+                            CommitPoint commitPoint = new CommitPoint(this, commitsToDelete, directory, sis);
                             if (sis.GetGeneration() == segmentInfos.GetGeneration())
                             {
                                 currentCommitPoint = commitPoint;
@@ -258,11 +251,10 @@ namespace Lucene.Net.Index
 			// Now delete anything with ref count at 0.  These are
 			// presumably abandoned files eg due to crash of
 			// IndexWriter.
-			System.Collections.Generic.IEnumerator<System.Collections.Generic.KeyValuePair<System.String, RefCount>> it = refCounts.GetEnumerator();
-			while (it.MoveNext())
+			foreach(KeyValuePair<string, RefCount> entry in refCounts)
 			{
-				System.String fileName = (System.String) it.Current.Key;
-				RefCount rc = (RefCount) refCounts[fileName];
+                string fileName = entry.Key;
+				RefCount rc = entry.Value; // use the enumerated entry; no second lookup
 				if (0 == rc.count)
 				{
 					if (infoStream != null)
@@ -306,15 +298,14 @@ namespace Lucene.Net.Index
 				// the now-deleted commits:
 				for (int i = 0; i < size; i++)
 				{
-					CommitPoint commit = (CommitPoint) commitsToDelete[i];
+					CommitPoint commit = commitsToDelete[i];
 					if (infoStream != null)
 					{
 						Message("deleteCommits: now decRef commit \"" + commit.GetSegmentsFileName() + "\"");
 					}
-					System.Collections.Generic.IEnumerator<string> it = commit.files.GetEnumerator();
-					while (it.MoveNext())
+					foreach(string file in commit.files)
 					{
-						DecRef(it.Current);
+						DecRef(file);
 					}
 				}
 				commitsToDelete.Clear();
@@ -325,7 +316,7 @@ namespace Lucene.Net.Index
 				int writeTo = 0;
 				while (readFrom < size)
 				{
-					CommitPoint commit = (CommitPoint) commits[readFrom];
+					CommitPoint commit = commits[readFrom];
 					if (!commit.deleted)
 					{
 						if (writeTo != readFrom)
@@ -407,7 +398,7 @@ namespace Lucene.Net.Index
 		{
 			if (deletable != null)
 			{
-				System.Collections.Generic.IList<string> oldDeletable = deletable;
+				IList<string> oldDeletable = deletable;
 				deletable = null;
 				int size = oldDeletable.Count;
 				for (int i = 0; i < size; i++)
@@ -469,7 +460,7 @@ namespace Lucene.Net.Index
 			else
 			{
 				
-				System.Collections.Generic.IList<string> docWriterFiles;
+				IList<string> docWriterFiles;
 				if (docWriter != null)
 				{
 					docWriterFiles = docWriter.OpenFiles();
@@ -492,17 +483,11 @@ namespace Lucene.Net.Index
 				}
 				
 				// Save files so we can decr on next checkpoint/commit:
-                foreach (string fname in segmentInfos.Files(directory, false))
-                {
-                    lastFiles.Add(fname);
-                }
+                lastFiles.Add(segmentInfos.Files(directory, false));
 				
                 if (docWriterFiles != null)
                 {
-                    foreach (string fname in docWriterFiles)
-                    {
-                        lastFiles.Add(fname);
-                    }
+                    lastFiles.Add(docWriterFiles);
                 }
 			}
 		}
@@ -511,23 +496,21 @@ namespace Lucene.Net.Index
 		{
 			// If this is a commit point, also incRef the
 			// segments_N file:
-			System.Collections.Generic.IEnumerator<string> it = segmentInfos.Files(directory, isCommit).GetEnumerator();
-			while (it.MoveNext())
+			foreach(string fileName in segmentInfos.Files(directory, isCommit))
 			{
-				IncRef(it.Current);
+				IncRef(fileName);
 			}
 		}
-		
-		internal void  IncRef(System.Collections.Generic.IList<string> files)
+
+        internal void IncRef(ICollection<string> files)
 		{
-			int size = files.Count;
-			for (int i = 0; i < size; i++)
+            foreach(string file in files)
 			{
-				IncRef((System.String) files[i]);
+                IncRef(file);
 			}
 		}
 		
-		internal void  IncRef(System.String fileName)
+		internal void  IncRef(string fileName)
 		{
 			RefCount rc = GetRefCount(fileName);
 			if (infoStream != null && VERBOSE_REF_COUNTS)
@@ -537,12 +520,11 @@ namespace Lucene.Net.Index
 			rc.IncRef();
 		}
 		
-		internal void  DecRef(System.Collections.Generic.ICollection<string> files)
+		internal void  DecRef(ICollection<string> files)
 		{
-            System.Collections.Generic.IEnumerator<string> it = files.GetEnumerator();
-            while (it.MoveNext())
+            foreach(string file in files)
             {
-                DecRef(it.Current);
+                DecRef(file);
             }
 		}
 		
@@ -571,10 +553,9 @@ namespace Lucene.Net.Index
 		
 		internal void  DecRef(SegmentInfos segmentInfos)
 		{
-			System.Collections.Generic.IEnumerator<string> it = segmentInfos.Files(directory, false).GetEnumerator();
-			while (it.MoveNext())
+			foreach(string file in segmentInfos.Files(directory, false))
 			{
-				DecRef(it.Current);
+				DecRef(file);
 			}
 		}
 
@@ -600,16 +581,15 @@ namespace Lucene.Net.Index
 			}
 			else
 			{
-				rc = (RefCount) refCounts[fileName];
+				rc = refCounts[fileName];
 			}
 			return rc;
 		}
 		
-		internal void  DeleteFiles(System.Collections.IList files)
+		internal void  DeleteFiles(IList<string> files)
 		{
-			int size = files.Count;
-			for (int i = 0; i < size; i++)
-				DeleteFile((System.String) files[i]);
+			foreach(string file in files)
+				DeleteFile(file);
 		}
 		
 		/// <summary>Deletes the specified files, but only if they are new
@@ -617,10 +597,8 @@ namespace Lucene.Net.Index
 		/// </summary>
         internal void DeleteNewFiles(System.Collections.Generic.ICollection<string> files)
 		{
-			System.Collections.IEnumerator it = files.GetEnumerator();
-			while (it.MoveNext())
+			foreach(string fileName in files)
 			{
-				System.String fileName = (System.String) it.Current;
                 if (!refCounts.ContainsKey(fileName))
                 {
                     if (infoStream != null)
@@ -661,7 +639,7 @@ namespace Lucene.Net.Index
 					}
 					if (deletable == null)
 					{
-                        deletable = new System.Collections.Generic.List<string>();
+                        deletable = new List<string>();
 					}
 					deletable.Add(fileName); // add to deletable
 				}
@@ -708,7 +686,7 @@ namespace Lucene.Net.Index
 		/// equals.
 		/// </summary>
 		
-		sealed private class CommitPoint:IndexCommit, System.IComparable
+		private sealed class CommitPoint : IndexCommit, System.IComparable<CommitPoint>
 		{
             private void InitBlock(IndexFileDeleter enclosingInstance)
             {
@@ -725,17 +703,17 @@ namespace Lucene.Net.Index
             }
 			
 			internal long gen;
-            internal System.Collections.Generic.ICollection<string> files;
-			internal System.String segmentsFileName;
+            internal ICollection<string> files;
+			internal string segmentsFileName;
 			internal bool deleted;
 			internal Directory directory;
-			internal System.Collections.ICollection commitsToDelete;
+            internal ICollection<CommitPoint> commitsToDelete;
 			internal long version;
 			internal long generation;
 			internal bool isOptimized;
-            internal System.Collections.Generic.IDictionary<string, string> userData;
+            internal IDictionary<string, string> userData;
 			
-			public CommitPoint(IndexFileDeleter enclosingInstance, System.Collections.ICollection commitsToDelete, Directory directory, SegmentInfos segmentInfos)
+			public CommitPoint(IndexFileDeleter enclosingInstance, ICollection<CommitPoint> commitsToDelete, Directory directory, SegmentInfos segmentInfos)
 			{
 				InitBlock(enclosingInstance);
 				this.directory = directory;
@@ -766,7 +744,7 @@ namespace Lucene.Net.Index
 				return segmentsFileName;
 			}
 
-            public override System.Collections.Generic.ICollection<string> GetFileNames()
+            public override ICollection<string> GetFileNames()
 			{
 				return files;
 			}
@@ -786,7 +764,7 @@ namespace Lucene.Net.Index
 				return generation;
 			}
 
-            public override System.Collections.Generic.IDictionary<string, string> GetUserData()
+            public override IDictionary<string, string> GetUserData()
 			{
 				return userData;
 			}
@@ -807,10 +785,9 @@ namespace Lucene.Net.Index
 			{
 				return deleted;
 			}
-			
-			public int CompareTo(System.Object obj)
+
+            public int CompareTo(CommitPoint commit)
 			{
-				CommitPoint commit = (CommitPoint) obj;
 				if (gen < commit.gen)
 				{
 					return - 1;