Posted to commits@lucenenet.apache.org by mh...@apache.org on 2011/07/24 03:21:30 UTC

[Lucene.Net] svn commit: r1150245 [1/2] - in /incubator/lucene.net/branches/Lucene.Net_2_9_4g: ./ src/core/ src/core/Analysis/ src/core/Analysis/Tokenattributes/ src/core/Store/ src/core/Util/ test/core/ test/core/Analysis/ test/core/Analysis/Tokenattributes/ test/...

Author: mherndon
Date: Sun Jul 24 01:21:27 2011
New Revision: 1150245

URL: http://svn.apache.org/viewvc?rev=1150245&view=rev
Log:
LUCENENET-439, LUCENENET-438, LUCENENET-436. Added a Categories constant class for the NUnit Category attribute. Cleaned up tests and the classes they touch, starting in the Analysis namespace. TokenStream, Analyzer, and AlreadyClosedException were cleaned up; the Attribute interface was changed to IAttribute.

Added:
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Categories.cs
Modified:
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/Tokenattributes/TestTermAttributeImpl.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Lucene.Net.Test.csproj
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Util/_TestUtil.cs

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs%20FROM%202.9.4.txt?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt Sun Jul 24 01:21:27 2011
@@ -23,6 +23,11 @@
   
 * Filter.Bits(Obsolete) removed.
 
+* LUCENENET-438 JavaDoc comments are converted into XML Doc Comments
+* LUCENENET-439 fix exception re-throws
+* LUCENENET-435 fix up the test-suite for Lucene.Net Core lib. 
+* LUCENENET-436 fix up deprecated code. 
+
 -------------------------------------------------------------------------------
 
 A few API changes like:

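
The LUCENENET-439 entry above covers fixing exception re-throws. As a minimal
self-contained C# sketch (illustrative only, not code from this commit): a bare
"throw;" preserves the original stack trace of a caught exception, while
"throw ex;" resets the trace to the line of the re-throw, which is what the
changes to Analyzer.cs below avoid.

    using System;

    public static class RethrowExample
    {
        private static void Fail()
        {
            throw new InvalidOperationException("boom");
        }

        private static void Process()
        {
            try
            {
                Fail();
            }
            catch (InvalidOperationException)
            {
                // "throw ex;" here would rewrite the stack trace to point at
                // this catch block; a bare "throw;" keeps the Fail() frame.
                throw;
            }
        }

        public static void Main()
        {
            try
            {
                Process();
            }
            catch (Exception ex)
            {
                // The trace still names Fail() as the origin of the error.
                Console.WriteLine(ex.StackTrace);
            }
        }
    }
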
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs Sun Jul 24 01:21:27 2011
@@ -20,148 +20,215 @@ using System;
 using Fieldable = Lucene.Net.Documents.Fieldable;
 using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
 using Lucene.Net.Util;
+using System.Reflection;
+using System.IO;
 
 namespace Lucene.Net.Analysis
 {
+    // JAVA: src/java/org/apache/lucene/analysis/Analyzer.java
 	
-	/// <summary>An Analyzer builds TokenStreams, which analyze text.  It thus represents a
-	/// policy for extracting index terms from text.
-	/// <p/>
-	/// Typical implementations first build a Tokenizer, which breaks the stream of
-	/// characters from the Reader into raw Tokens.  One or more TokenFilters may
-	/// then be applied to the output of the Tokenizer.
+	/// <summary>
+    ///     An <see cref="Analyzer"/> represents a policy for extracting terms that are 
+    ///     indexed from text. The <see cref="Analyzer"/> builds <see cref="TokenStream"/>s, which 
+    ///     break text down into tokens. 
 	/// </summary>
+    /// <remarks>
+    ///     <para>
+    ///         A typical <see cref="Analyzer"/> implementation will first build a <see cref="Tokenizer"/>.
+    ///         The <see cref="Tokenizer"/> will break down the stream of characters from the 
+    ///         <see cref="System.IO.TextReader"/> into raw <see cref="Token"/>s.  One or 
+    ///         more <see cref="TokenFilter"/>s may then be applied to the output of the <see cref="Tokenizer"/>.
+    ///     </para>
+    /// </remarks>
+    // REFACTOR: determine if this class should use IDisposable since it has a Close() method.
 	public abstract class Analyzer
 	{
-		/// <summary>Creates a TokenStream which tokenizes all the text in the provided
-		/// Reader.  Must be able to handle null field name for
-		/// backward compatibility.
-		/// </summary>
+        private CloseableThreadLocal<object> tokenStreams = new CloseableThreadLocal<object>();
+
+        /// <summary>
+        /// Indicates whether a derived class overrides the <see cref="TokenStream(String, TextReader)"/> method. 
+        /// </summary>
+        protected internal bool overridesTokenStreamMethod;
+
+		/// <summary>
+        /// Creates a <see cref="TokenStream"/> which tokenizes all the text in 
+        /// the provided <see cref="TextReader"/>.
+        /// </summary>
+		/// <param name="fieldName">The name of the <see cref="Lucene.Net.Documents.Field"/>; it can be <c>null</c>.</param>
+		/// <param name="reader">The text reader.</param>
+		/// <returns>A <see cref="TokenStream"/>.</returns>
 		public abstract TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader);
 		
-		/// <summary>Creates a TokenStream that is allowed to be re-used
-		/// from the previous time that the same thread called
-		/// this method.  Callers that do not need to use more
-		/// than one TokenStream at the same time from this
-		/// analyzer should use this method for better
-		/// performance.
-		/// </summary>
-		public virtual TokenStream ReusableTokenStream(System.String fieldName, System.IO.TextReader reader)
+		/// <summary>
+        ///     Returns a reusable, previously saved <see cref="TokenStream"/> for the
+        ///     same thread that calls this method. Callers that do not need to use more
+		///     than one TokenStream at the same time from this analyzer should use this 
+        ///     method for better performance.
+		/// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         This method defaults to invoking <see cref="TokenStream(String, TextReader)" />
+        ///     </para>
+        /// </remarks>
+		public virtual TokenStream ReusableTokenStream(String fieldName, TextReader reader)
 		{
 			return TokenStream(fieldName, reader);
 		}
 		
-		private CloseableThreadLocal<object> tokenStreams = new CloseableThreadLocal<object>();
 		
-		/// <summary>Used by Analyzers that implement reusableTokenStream
-		/// to retrieve previously saved TokenStreams for re-use
-		/// by the same thread. 
-		/// </summary>
+		
+		/// <summary>
+        /// Gets the previous <see cref="TokenStream"/> used by Analyzers that implement (override) 
+        /// <see cref="Analyzer.ReusableTokenStream(String, TextReader)"/> to retrieve a 
+        /// previously saved <see cref="TokenStream"/> for re-use by the same thread. 
+		/// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         This method uses a <see cref="CloseableThreadLocal{T}"/> to store and retrieve the previous <see cref="TokenStream"/> for the current thread.
+        ///     </para>
+        /// </remarks>
+        /// <exception cref="AlreadyClosedException">Thrown when the analyzer has already been closed.</exception>
+        /// <exception cref="System.NullReferenceException">
+        ///     Thrown when a null reference occurs inside <see cref="CloseableThreadLocal{T}"/> while the
+        ///     analyzer is still open.
+        /// </exception>
+        // REFACTOR: turn into a property.
 		protected internal virtual System.Object GetPreviousTokenStream()
 		{
 			try
 			{
 				return tokenStreams.Get();
 			}
-			catch (System.NullReferenceException npe)
+			catch (System.NullReferenceException ex)
 			{
+                // GLOBALIZATION: get exception message from resource file.
 				if (tokenStreams == null)
-				{
-					throw new AlreadyClosedException("this Analyzer is closed");
-				}
-				else
-				{
-					throw npe;
-				}
+					throw new AlreadyClosedException("this Analyzer is closed", ex);
+
+                // default to re-throw to keep the stack trace intact.
+				throw;
+				
 			}
 		}
 		
-		/// <summary>Used by Analyzers that implement reusableTokenStream
-		/// to save a TokenStream for later re-use by the same
-		/// thread. 
-		/// </summary>
+		/// <summary>
+        ///     Sets the <see cref="TokenStream"/> used by Analyzers that implement (override) 
+        ///     <see cref="Analyzer.ReusableTokenStream(String, TextReader)"/>
+        ///     to save a <see cref="TokenStream" /> for later re-use by the same thread. 
+        /// </summary>
+		/// <param name="obj">The <see cref="TokenStream"/> to save for later re-use.</param>
 		protected internal virtual void  SetPreviousTokenStream(System.Object obj)
 		{
 			try
 			{
 				tokenStreams.Set(obj);
 			}
-			catch (System.NullReferenceException npe)
+			catch (System.NullReferenceException ex)
 			{
+                // GLOBALIZATION: get exception message from resource file.
 				if (tokenStreams == null)
-				{
-					throw new AlreadyClosedException("this Analyzer is closed");
-				}
-				else
-				{
-					throw npe;
-				}
+					throw new AlreadyClosedException("this Analyzer is closed", ex);
+
+                // default to re-throw to keep the stack trace intact.
+                throw;
+				
 			}
 		}
 		
-		protected internal bool overridesTokenStreamMethod;
-		
-		/// <deprecated> This is only present to preserve
-		/// back-compat of classes that subclass a core analyzer
-		/// and override tokenStream but not reusableTokenStream 
-		/// </deprecated>
-        [Obsolete("This is only present to preserve back-compat of classes that subclass a core analyzer and override tokenStream but not reusableTokenStream ")]
+       
+		
+	    /// <summary>
+        /// This is only present to preserve
+        /// back-compat of classes that subclass a core analyzer
+        /// and override tokenStream but not reusableTokenStream.
+	    /// </summary>
+	    /// <param name="baseClass">The base class type.</param>
+        [Obsolete("This is only present to preserve backwards compatibility of classes that subclass a core analyzer and override tokenStream but not reusableTokenStream ")]
 		protected internal virtual void  SetOverridesTokenStreamMethod(System.Type baseClass)
 		{
-			
-			System.Type[] params_Renamed = new System.Type[2];
-			params_Renamed[0] = typeof(System.String);
-			params_Renamed[1] = typeof(System.IO.TextReader);
-			
+
+            Type[] parameterTypes = new Type[] { typeof(String), typeof(TextReader) };
+
 			try
 			{
-				System.Reflection.MethodInfo m = this.GetType().GetMethod("TokenStream", (params_Renamed == null)?new System.Type[0]:(System.Type[]) params_Renamed);
-				if (m != null)
-				{
-					overridesTokenStreamMethod = m.DeclaringType != baseClass;
-				}
-				else
-				{
-					overridesTokenStreamMethod = false;
-				}
+                // A freshly constructed array is never null, so no fallback is needed.
+				MethodInfo method = this.GetType().GetMethod("TokenStream", parameterTypes);
+
+                overridesTokenStreamMethod = (method != null && method.DeclaringType != baseClass);
 			}
-			catch (System.MethodAccessException nsme)
+			catch
 			{
 				overridesTokenStreamMethod = false;
 			}
 		}
 		
 		
-		/// <summary> Invoked before indexing a Fieldable instance if
-		/// terms have already been added to that field.  This allows custom
-		/// analyzers to place an automatic position increment gap between
-		/// Fieldable instances using the same field name.  The default value
-		/// position increment gap is 0.  With a 0 position increment gap and
-		/// the typical default token position increment of 1, all terms in a field,
-		/// including across Fieldable instances, are in successive positions, allowing
-		/// exact PhraseQuery matches, for instance, across Fieldable instance boundaries.
-		/// 
-		/// </summary>
-		/// <param name="fieldName">Fieldable name being indexed.
-		/// </param>
-		/// <returns> position increment gap, added to the next token emitted from {@link #TokenStream(String,Reader)}
+		/// <summary> 
+        ///     Gets the position increment gap between two 
+        ///     <see cref="Lucene.Net.Documents.Field"/>s that have the same name. This 
+        ///     is called before indexing a <see cref="Fieldable"/> instance if terms 
+        ///     have already been added to that field. 
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///     Specifying the position increment gap allows custom
+        ///     <see cref="Analyzer"/>s to place an automatic position increment gap between
+        ///     <see cref="Fieldable"/> instances using the same field name. 
+        ///     </para>
+        ///     <para>
+        ///         The default position increment gap is 0.  
+        ///     </para>
+        ///     <para>
+        ///         <b>Position Increment Gap</b> - The value that controls the 
+        ///         virtual space between the last <see cref="Token"/> of one <see cref="Field"/> 
+        ///         instance and the first <see cref="Token"/> of the next instance. 
+        ///         Both fields share the same name. 
+        ///     </para>
+        ///     <para>
+        ///         Suppose a document has a multi-valued "author" field, like this:
+        ///     </para>
+        ///     <ul>
+        ///         <li>author: John Doe</li>
+        ///         <li>author: Bob Smith</li>
+        ///     </ul>
+        ///     <para>
+        ///         With a position increment gap of 0, a phrase query of "doe bob" would
+        ///         be a match.  With a gap of 100, a phrase query of "doe bob" would not
+        ///         match.  The gap of 100 would prevent the phrase queries from matching
+        ///         even with a modest slop factor. 
+        ///     </para>
+        ///     <note>
+        ///         This explanation of the position increment gap was pulled from an entry by Erik Hatcher on the 
+        ///         <a href="http://mail-archives.apache.org/mod_mbox/lucene-solr-user/200810.mbox/%3C045DC0D3-789D-433E-88B9-9252392BB1D6@ehatchersolutions.com%3E">
+        ///         lucene-solr-user list</a>. 
+        ///         This was a better explanation than the one found in the code comments from the Lucene-Solr project.
+        ///     </note>
+		/// </remarks>
+		/// <param name="fieldName">The name of the field being indexed. </param>
+		/// <returns> 
+        ///     The position increment gap added to the next token emitted 
+        ///     from <see cref="TokenStream(String,TextReader)" />
 		/// </returns>
 		public virtual int GetPositionIncrementGap(System.String fieldName)
 		{
 			return 0;
 		}
 		
-		/// <summary> Just like {@link #getPositionIncrementGap}, except for
-		/// Token offsets instead.  By default this returns 1 for
-		/// tokenized fields and, as if the fields were joined
-		/// with an extra space character, and 0 for un-tokenized
-		/// fields.  This method is only called if the field
-		/// produced at least one token for indexing.
-		/// 
-		/// </summary>
-		/// <param name="field">the field just indexed
-		/// </param>
-		/// <returns> offset gap, added to the next token emitted from {@link #TokenStream(String,Reader)}
+		/// <summary> 
+        ///     Gets the offset gap for a token in the specified field. By default this method
+        ///     returns 1 for tokenized fields and 0 if the field is untokenized.
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         This method is similar to <see cref="GetPositionIncrementGap(String)"/>
+        ///         and is only called if the field produced at least one token for indexing.
+        ///     </para>
+        /// </remarks>
+		/// <param name="field">the field that was just analyzed </param>
+		/// <returns> 
+        ///     The offset gap, added to the next token emitted 
+        ///     from <see cref="TokenStream(String,TextReader)" />.
 		/// </returns>
 		public virtual int GetOffsetGap(Fieldable field)
 		{
@@ -171,7 +238,15 @@ namespace Lucene.Net.Analysis
 				return 0;
 		}
 		
-		/// <summary>Frees persistent resources used by this Analyzer </summary>
+		/// <summary>   
+        ///     Frees persistent resources used by the <see cref="Analyzer"/>.
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         The default implementation closes the internal <see cref="TokenStream"/>s 
+        ///         used by the analyzer.
+        ///     </para>
+        /// </remarks>
 		public virtual void  Close()
 		{
 			tokenStreams.Close();

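
The position-increment-gap remarks added to Analyzer.cs above lend themselves to
a short example. The analyzer below is a hypothetical sketch (it is not part of
this commit; the class name and the gap of 100 are invented for illustration):
overriding GetPositionIncrementGap keeps a PhraseQuery such as "doe bob" from
matching across two values of a multi-valued "author" field.

    using System;
    using System.IO;
    using Lucene.Net.Analysis;

    // Hypothetical analyzer, for illustration only.
    public class GappedAnalyzer : Analyzer
    {
        public override TokenStream TokenStream(String fieldName, TextReader reader)
        {
            // Tokenize on letters and lower-case, as SimpleAnalyzer does.
            return new LowerCaseTokenizer(reader);
        }

        public override int GetPositionIncrementGap(String fieldName)
        {
            // Insert 100 virtual positions between the last token of one
            // field instance and the first token of the next, so phrases
            // cannot match across instance boundaries even with modest slop.
            return 100;
        }
    }
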
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs Sun Jul 24 01:21:27 2011
@@ -20,19 +20,33 @@ using System;
 namespace Lucene.Net.Analysis
 {
 	
-	/// <summary>An {@link Analyzer} that filters {@link LetterTokenizer} 
-	/// with {@link LowerCaseFilter} 
+	/// <summary>An <see cref="Analyzer"/> that filters <see cref="LetterTokenizer"/>
+	/// with <see cref="LowerCaseFilter"/>
 	/// </summary>
 	
-	public sealed class SimpleAnalyzer:Analyzer
+	public sealed class SimpleAnalyzer : Analyzer
 	{
+
+        /// <summary>
+        /// Creates a <see cref="TokenStream"/> that tokenizes and lower-cases all the text 
+        /// in the provided <see cref="System.IO.TextReader"/>.
+        /// </summary>
+        /// <param name="fieldName">The name of the field; it can be <c>null</c>.</param>
+        /// <param name="reader">The text reader.</param>
+        /// <returns>A <see cref="LowerCaseTokenizer"/> for the given reader.</returns>
 		public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 		{
 			return new LowerCaseTokenizer(reader);
 		}
 		
+        /// <summary>
+        /// Creates a reusable <see cref="TokenStream"/>, re-using the <see cref="Tokenizer"/> 
+        /// previously saved by the same thread when one exists.
+        /// </summary>
+        /// <param name="fieldName">The name of the field; it can be <c>null</c>.</param>
+        /// <param name="reader">The text reader.</param>
+        /// <returns>A reusable <see cref="LowerCaseTokenizer"/>.</returns>
 		public override TokenStream ReusableTokenStream(System.String fieldName, System.IO.TextReader reader)
 		{
+            
 			Tokenizer tokenizer = (Tokenizer) GetPreviousTokenStream();
 			if (tokenizer == null)
 			{

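
The SimpleAnalyzer hunk above is cut off inside ReusableTokenStream. For readers
following the reuse pattern, this is the usual shape such an override takes in
the 2.9-era API, sketched from the GetPreviousTokenStream/SetPreviousTokenStream
contract documented in Analyzer.cs above (the Tokenizer.Reset(TextReader) call
and the usual using directives are assumed here; they are not shown in this diff):

    public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
    {
        Tokenizer tokenizer = (Tokenizer) GetPreviousTokenStream();
        if (tokenizer == null)
        {
            // First call on this thread: create and remember the tokenizer.
            tokenizer = new LowerCaseTokenizer(reader);
            SetPreviousTokenStream(tokenizer);
        }
        else
        {
            // Later calls: re-point the saved tokenizer at the new reader.
            tokenizer.Reset(reader);
        }
        return tokenizer;
    }
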
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs Sun Jul 24 01:21:27 2011
@@ -26,7 +26,7 @@ using TypeAttribute = Lucene.Net.Analysi
 using Payload = Lucene.Net.Index.Payload;
 using TermPositions = Lucene.Net.Index.TermPositions;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 
 namespace Lucene.Net.Analysis

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs Sun Jul 24 01:21:27 2011
@@ -26,91 +26,94 @@ using TypeAttribute = Lucene.Net.Analysi
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 using AttributeSource = Lucene.Net.Util.AttributeSource;
+using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis
 {
+    // JAVA: src/java/org/apache/lucene/analysis/TokenStream.java
 	
-	/// <summary> A <code>TokenStream</code> enumerates the sequence of tokens, either from
-	/// {@link Field}s of a {@link Document} or from query text.
-	/// <p/>
-	/// This is an abstract class. Concrete subclasses are:
-	/// <ul>
-	/// <li>{@link Tokenizer}, a <code>TokenStream</code> whose input is a Reader; and</li>
-	/// <li>{@link TokenFilter}, a <code>TokenStream</code> whose input is another
-	/// <code>TokenStream</code>.</li>
-	/// </ul>
-	/// A new <code>TokenStream</code> API has been introduced with Lucene 2.9. This API
-	/// has moved from being {@link Token} based to {@link Attribute} based. While
-	/// {@link Token} still exists in 2.9 as a convenience class, the preferred way
-	/// to store the information of a {@link Token} is to use {@link AttributeImpl}s.
-	/// <p/>
-	/// <code>TokenStream</code> now extends {@link AttributeSource}, which provides
-	/// access to all of the token {@link Attribute}s for the <code>TokenStream</code>.
-	/// Note that only one instance per {@link AttributeImpl} is created and reused
-	/// for every token. This approach reduces object creation and allows local
-	/// caching of references to the {@link AttributeImpl}s. See
-	/// {@link #IncrementToken()} for further details.
-	/// <p/>
-	/// <b>The workflow of the new <code>TokenStream</code> API is as follows:</b>
-	/// <ol>
-	/// <li>Instantiation of <code>TokenStream</code>/{@link TokenFilter}s which add/get
-	/// attributes to/from the {@link AttributeSource}.</li>
-	/// <li>The consumer calls {@link TokenStream#Reset()}.</li>
-	/// <li>The consumer retrieves attributes from the stream and stores local
-	/// references to all attributes it wants to access</li>
-	/// <li>The consumer calls {@link #IncrementToken()} until it returns false and
-	/// consumes the attributes after each call.</li>
-	/// <li>The consumer calls {@link #End()} so that any end-of-stream operations
-	/// can be performed.</li>
-	/// <li>The consumer calls {@link #Close()} to release any resource when finished
-	/// using the <code>TokenStream</code></li>
-	/// </ol>
-	/// To make sure that filters and consumers know which attributes are available,
-	/// the attributes must be added during instantiation. Filters and consumers are
-	/// not required to check for availability of attributes in
-	/// {@link #IncrementToken()}.
-	/// <p/>
-	/// You can find some example code for the new API in the analysis package level
-	/// Javadoc.
-	/// <p/>
-	/// Sometimes it is desirable to capture a current state of a <code>TokenStream</code>
-	/// , e. g. for buffering purposes (see {@link CachingTokenFilter},
-	/// {@link TeeSinkTokenFilter}). For this usecase
-	/// {@link AttributeSource#CaptureState} and {@link AttributeSource#RestoreState}
-	/// can be used.
-	/// </summary>
-	public abstract class TokenStream:AttributeSource
+	/// <summary> 
+    ///     A <see cref="Lucene.Net.Analysis.TokenStream"/> enumerates the sequence of tokens, either from
+    ///     <see cref="Lucene.Net.Documents.Field"/>s of a <see cref="Lucene.Net.Documents.Document"/> 
+    ///     or from query text.
+    /// </summary>
+    /// <remarks>
+    ///     <para>
+    ///         A new <see cref="Lucene.Net.Analysis.TokenStream"/>  API has been introduced with Lucene 2.9. This API
+    ///         has moved from being <see cref="Lucene.Net.Analysis.Token"/> based to <see cref="Lucene.Net.Util.IAttribute" /> based. While
+    ///         <see cref="Lucene.Net.Analysis.Token"/> still exists in 2.9 as a convenience class, the preferred way
+    ///         to store the information of a <see cref="Lucene.Net.Analysis.Token"/> is to use <see cref="Lucene.Net.Util.AttributeImpl" />s.
+    ///     </para>
+	///     <para>
+    ///         <c>TokenStream</c> now extends <see cref="Lucene.Net.Util.AttributeSource" />, which provides
+    ///         access to all of the token <see cref="Lucene.Net.Util.IAttribute"/>s for the <c>TokenStream</c>.
+    ///         Note that only one instance per <see cref="Lucene.Net.Util.AttributeImpl" /> is created and reused
+	///         for every token. This approach reduces object creation and allows local
+    ///         caching of references to the <see cref="Lucene.Net.Util.AttributeImpl" />s. See
+	///         <see cref="IncrementToken"/> for further details.
+    ///     </para>
+	///     <para>
+    ///         <b>The workflow of the new <c>TokenStream</c> API is as follows:</b>
+    ///     </para>
+	///     <ol>
+	///         <li>
+    ///             Instantiation of <see cref="TokenStream" /> / <see cref="TokenFilter"/>s which add/get
+	///             attributes to/from the <see cref="Lucene.Net.Util.AttributeSource"/>.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="Reset()"/>.
+    ///         </li>
+	///         <li>
+    ///             The consumer retrieves attributes from the stream and stores local
+	///             references to all attributes it wants to access.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="IncrementToken()"/> until it returns false and
+	///             consumes the attributes after each call.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="End()"/> so that any end-of-stream operations
+	///             can be performed.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="Close()"/> to release any resource when finished
+	///             using the <c>TokenStream</c>
+    ///         </li>
+	///     </ol>
+    /// </remarks>
+	public abstract class TokenStream : AttributeSource
 	{
-		private void  InitBlock()
-		{
-			supportedMethods = GetSupportedMethods(this.GetType());
-		}
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private static readonly AttributeFactory DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY = new TokenWrapperAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        private static readonly AttributeFactory DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY = new TokenWrapperAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private TokenWrapper tokenWrapper;
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        private TokenWrapper tokenWrapper;
+
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private static bool onlyUseNewAPI = false;
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        private static bool onlyUseNewAPI = false;
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private MethodSupport supportedMethods;
+        private MethodSupport supportedMethods;
+
+		private void  InitBlock()
+        {
+            // REMOVE: in 3.0
+            #pragma warning disable 618
+            supportedMethods = GetSupportedMethods(this.GetType());
+            #pragma warning restore 618
+		}
+		
+		
 		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+		// REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
 		private sealed class MethodSupport
 		{
@@ -141,9 +144,8 @@ namespace Lucene.Net.Analysis
 			private static readonly System.Type[] METHOD_NO_PARAMS = new System.Type[0];
 			private static readonly System.Type[] METHOD_TOKEN_PARAM = new System.Type[]{typeof(Token)};
 		}
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
         private static readonly Support.Dictionary<Type, MethodSupport> knownMethodSupport = new Support.Dictionary<Type, MethodSupport>();
 
@@ -159,8 +161,7 @@ namespace Lucene.Net.Analysis
          */
         // Aroush-2.9}}
 
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
 		private static MethodSupport GetSupportedMethods(System.Type clazz)
 		{
@@ -175,9 +176,8 @@ namespace Lucene.Net.Analysis
 			}
 			return supportedMethods;
 		}
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
 		private sealed class TokenWrapperAttributeFactory:AttributeFactory
 		{
@@ -212,33 +212,47 @@ namespace Lucene.Net.Analysis
 				return delegate_Renamed.GetHashCode() ^ 0x0a45ff31;
 			}
 		}
-		
-		/// <summary> A TokenStream using the default attribute factory.</summary>
-		protected internal TokenStream():base(onlyUseNewAPI?AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY:TokenStream.DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY)
+
+        /// <summary> A <see cref="TokenStream"/> using the default attribute factory.</summary>
+        #pragma warning disable 618
+        protected internal TokenStream() : 
+            base( onlyUseNewAPI ? AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY : TokenStream.DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY)
 		{
 			InitBlock();
 			tokenWrapper = InitTokenWrapper(null);
 			Check();
 		}
-		
-		/// <summary> A TokenStream that uses the same attributes as the supplied one.</summary>
+        #pragma warning restore 618
+
+        /// <summary> A <see cref="TokenStream"/> that uses the same attributes as the supplied one.</summary>
 		protected internal TokenStream(AttributeSource input):base(input)
 		{
 			InitBlock();
-			tokenWrapper = InitTokenWrapper(input);
+            
+            // REMOVE: in 3.0
+            #pragma warning disable 618
+            tokenWrapper = InitTokenWrapper(input);
 			Check();
-		}
-		
-		/// <summary> A TokenStream using the supplied AttributeFactory for creating new {@link Attribute} instances.</summary>
-		protected internal TokenStream(AttributeFactory factory):base(onlyUseNewAPI?factory:new TokenWrapperAttributeFactory(factory))
+            #pragma warning restore 618
+        }
+
+        /// <summary> 
+        ///     A <see cref="TokenStream"/> using the supplied AttributeFactory for creating 
+        ///     new <see cref="IAttribute"/> instances.
+        /// </summary>
+        #pragma warning disable 618
+        protected internal TokenStream(AttributeFactory factory)
+            :base( onlyUseNewAPI? factory: new TokenWrapperAttributeFactory(factory))
 		{
 			InitBlock();
-			tokenWrapper = InitTokenWrapper(null);
+
+            // REMOVE: in 3.0
+           
+            tokenWrapper = InitTokenWrapper(null);
 			Check();
-		}
+            #pragma warning restore 618
+        }
 		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
         [Obsolete("Remove this when old API is removed! ")]
 		private TokenWrapper InitTokenWrapper(AttributeSource input)
 		{
@@ -255,7 +269,7 @@ namespace Lucene.Net.Analysis
 					return ((TokenStream) input).tokenWrapper;
 				}
 				// check that all attributes are implemented by the same TokenWrapper instance
-				Attribute att = AddAttribute(typeof(TermAttribute));
+				IAttribute att = AddAttribute(typeof(TermAttribute));
 				if (att is TokenWrapper && AddAttribute(typeof(TypeAttribute)) == att && AddAttribute(typeof(PositionIncrementAttribute)) == att && AddAttribute(typeof(FlagsAttribute)) == att && AddAttribute(typeof(OffsetAttribute)) == att && AddAttribute(typeof(PayloadAttribute)) == att)
 				{
 					return (TokenWrapper) att;
@@ -267,8 +281,7 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+	
         [Obsolete("Remove this when old API is removed! ")]
 		private void  Check()
 		{
@@ -284,81 +297,92 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		/// <summary> For extra performance you can globally enable the new
-		/// {@link #IncrementToken} API using {@link Attribute}s. There will be a
-		/// small, but in most cases negligible performance increase by enabling this,
-		/// but it only works if <b>all</b> <code>TokenStream</code>s use the new API and
-		/// implement {@link #IncrementToken}. This setting can only be enabled
-		/// globally.
-		/// <p/>
-		/// This setting only affects <code>TokenStream</code>s instantiated after this
-		/// call. All <code>TokenStream</code>s already created use the other setting.
-		/// <p/>
-		/// All core {@link Analyzer}s are compatible with this setting, if you have
-		/// your own <code>TokenStream</code>s that are also compatible, you should enable
-		/// this.
-		/// <p/>
-		/// When enabled, tokenization may throw {@link UnsupportedOperationException}
-		/// s, if the whole tokenizer chain is not compatible eg one of the
-		/// <code>TokenStream</code>s does not implement the new <code>TokenStream</code> API.
-		/// <p/>
-		/// The default is <code>false</code>, so there is the fallback to the old API
-		/// available.
-		/// 
-		/// </summary>
-		/// <deprecated> This setting will no longer be needed in Lucene 3.0 as the old
-		/// API will be removed.
-		/// </deprecated>
+		/// <summary> 
+        ///     <para>
+        ///         For extra performance you can globally enable the new
+		///         <see cref="IncrementToken()"/> API using <see cref="IAttribute"/>s. There will be a
+		///         small, but in most cases negligible performance increase by enabling this,
+		///         but it only works if <b>all</b> <c>TokenStream</c>s use the new API and
+		///         implement <see cref="IncrementToken()"/>. This setting can only be enabled
+		///         globally.
+        ///     </para>
+        /// </summary>
+        /// <remarks>
+		///     <para>
+        ///         This setting only affects <see cref="TokenStream"/>s instantiated after this
+		///         call. All <c>TokenStream</c>s already created use the other setting.
+        ///     </para>
+        ///     <para>
+        ///         All core <see cref="Lucene.Net.Analysis.Analyzer"/>s are compatible with this setting, if you have
+		///         your own <c>TokenStream</c>s that are also compatible, you should enable
+		///         this.
+        ///     </para>
+		///     <para>
+		///         When enabled, tokenization may throw a <see cref="System.NotSupportedException"/> 
+		///         if the whole tokenizer chain is not compatible, e.g. if one of the
+		///         <c>TokenStream</c>s does not implement the new <c>TokenStream</c> API.
+        ///     </para>
+        ///     <para>
+		///         The default is <c>false</c>, so there is the fallback to the old API
+		///         available.
+        ///     </para>
+        /// </remarks>
+        /// <exception cref="System.NotSupportedException">May be thrown during tokenization, when enabled, if the tokenizer chain is not compatible.</exception>
         [Obsolete("This setting will no longer be needed in Lucene 3.0 as the old API will be removed.")]
 		public static void  SetOnlyUseNewAPI(bool onlyUseNewAPI)
-		{
-			TokenStream.onlyUseNewAPI = onlyUseNewAPI;
-		}
+        {
+            #pragma warning disable 618
+            TokenStream.onlyUseNewAPI = onlyUseNewAPI;
+            #pragma warning restore 618
+        }
 		
-		/// <summary> Returns if only the new API is used.
-		/// 
+		/// <summary> 
+        ///     Returns <c>true</c> if the new API is used, otherwise <c>false</c>.
 		/// </summary>
-		/// <seealso cref="setOnlyUseNewAPI">
-		/// </seealso>
-		/// <deprecated> This setting will no longer be needed in Lucene 3.0 as
-		/// the old API will be removed.
-		/// </deprecated>
         [Obsolete("This setting will no longer be needed in Lucene 3.0 as the old API will be removed.")]
 		public static bool GetOnlyUseNewAPI()
-		{
-			return onlyUseNewAPI;
-		}
-		
-		/// <summary> Consumers (i.e., {@link IndexWriter}) use this method to advance the stream to
-		/// the next token. Implementing classes must implement this method and update
-		/// the appropriate {@link AttributeImpl}s with the attributes of the next
-		/// token.
-		/// 
-		/// The producer must make no assumptions about the attributes after the
-		/// method has been returned: the caller may arbitrarily change it. If the
-		/// producer needs to preserve the state for subsequent calls, it can use
-		/// {@link #captureState} to create a copy of the current attribute state.
-		/// 
-		/// This method is called for every token of a document, so an efficient
-		/// implementation is crucial for good performance. To avoid calls to
-		/// {@link #AddAttribute(Class)} and {@link #GetAttribute(Class)} or downcasts,
-		/// references to all {@link AttributeImpl}s that this stream uses should be
-		/// retrieved during instantiation.
-		/// 
-		/// To ensure that filters and consumers know which attributes are available,
-		/// the attributes must be added during instantiation. Filters and consumers
-		/// are not required to check for availability of attributes in
-		/// {@link #IncrementToken()}.
-		/// 
-		/// </summary>
-		/// <returns> false for end of stream; true otherwise
-		/// 
-		/// Note that this method will be defined abstract in Lucene
-		/// 3.0.
-		/// </returns>
+        {
+            #pragma warning disable 618
+            return onlyUseNewAPI;
+            #pragma warning restore 618
+        }
+		
+		/// 
+        /// <summary> 
+        ///     Consumers, like <see cref="Lucene.Net.Index.IndexWriter"/>, use this 
+        ///     method to advance the stream to the next token. Implementing classes must 
+        ///     implement this method and update the appropriate <see cref="Lucene.Net.Util.AttributeImpl"/>s 
+        ///     with the attributes of the next token.
+        /// </summary>
+		/// <remarks>
+        ///     <para>
+		///         The producer must make no assumptions about the attributes after the
+		///         method has been returned: the caller may arbitrarily change it. If the
+		///         producer needs to preserve the state for subsequent calls, it can use
+		///         <see cref="AttributeSource.CaptureState()"/> to create a copy of the 
+        ///         current attribute state.
+        ///     </para>
+        ///     <para>
+		///         This method is called for every token of a document, so an efficient
+		///         implementation is crucial for good performance. To avoid calls to
+		///         <see cref="AttributeSource.AddAttribute(Type)"/> and <see cref="AttributeSource.GetAttribute(Type)"/> or downcasts,
+		///         references to all <see cref="AttributeImpl" />s that this stream uses should be
+		///         retrieved during instantiation.
+        ///     </para>
+        ///     <para>
+		///         To ensure that filters and consumers know which attributes are available,
+		///         the attributes must be added during instantiation. Filters and consumers
+		///         are not required to check for availability of attributes in
+		///         <see cref="IncrementToken()" />.
+        ///     </para>
+        /// </remarks>
+        /// <returns> <c>true</c> if the stream has <b>not</b> reached its end, otherwise <c>false</c>. </returns>
+        
 		public virtual bool IncrementToken()
-		{
-			System.Diagnostics.Debug.Assert(tokenWrapper != null);
+        {
+            // CHANGE: IncrementToken becomes an empty abstract method in 3.0 
+            #pragma warning disable 618
+            System.Diagnostics.Debug.Assert(tokenWrapper != null);
 			
 			Token token;
 			if (supportedMethods.hasReusableNext)
@@ -370,61 +394,78 @@ namespace Lucene.Net.Analysis
 				System.Diagnostics.Debug.Assert(supportedMethods.hasNext);
 				token = Next();
 			}
-			if (token == null)
+			
+            if (token == null)
 				return false;
-			tokenWrapper.delegate_Renamed = token;
+			
+            tokenWrapper.delegate_Renamed = token;
 			return true;
-		}
-		
-		/// <summary> This method is called by the consumer after the last token has been
-		/// consumed, after {@link #IncrementToken()} returned <code>false</code>
-		/// (using the new <code>TokenStream</code> API). Streams implementing the old API
-		/// should upgrade to use this feature.
-		/// <p/>
-		/// This method can be used to perform any end-of-stream operations, such as
-		/// setting the final offset of a stream. The final offset of a stream might
-		/// differ from the offset of the last token eg in case one or more whitespaces
-		/// followed after the last token, but a {@link WhitespaceTokenizer} was used.
-		/// 
-		/// </summary>
-		/// <throws>  IOException </throws>
+            
+            #pragma warning restore 618
+        }
+		
+		/// <summary> 
+        ///     This method is called by the consumer after the last token has been
+		///     consumed, after <see cref="IncrementToken()" /> returned <c>false</c>
+		///     (using the new <c>TokenStream</c> API). Streams implementing the old API
+		///     should upgrade to use this feature.
+        /// </summary>
+		/// <remarks>
+        ///     <para>
+		///         This method can be used to perform any end-of-stream operations, like
+		///         setting the final offset of a stream. The final offset of a stream might
+		///         differ from the offset of the last token, e.g. when one or more whitespace
+		///         characters followed the last token and a <see cref="WhitespaceTokenizer"/> was used.
+        ///     </para>
+        /// </remarks>
+        /// <exception cref="System.IO.IOException" />
 		public virtual void  End()
 		{
 			// do nothing by default
 		}
 		
-		/// <summary> Returns the next token in the stream, or null at EOS. When possible, the
-		/// input Token should be used as the returned Token (this gives fastest
-		/// tokenization performance), but this is not required and a new Token may be
-		/// returned. Callers may re-use a single Token instance for successive calls
-		/// to this method.
-		/// 
-		/// This implicitly defines a "contract" between consumers (callers of this
-		/// method) and producers (implementations of this method that are the source
-		/// for tokens):
-		/// <ul>
-		/// <li>A consumer must fully consume the previously returned {@link Token}
-		/// before calling this method again.</li>
-		/// <li>A producer must call {@link Token#Clear()} before setting the fields in
-		/// it and returning it</li>
-		/// </ul>
-		/// Also, the producer must make no assumptions about a {@link Token} after it
-		/// has been returned: the caller may arbitrarily change it. If the producer
-		/// needs to hold onto the {@link Token} for subsequent calls, it must clone()
-		/// it before storing it. Note that a {@link TokenFilter} is considered a
-		/// consumer.
-		/// 
-		/// </summary>
-		/// <param name="reusableToken">a {@link Token} that may or may not be used to return;
-		/// this parameter should never be null (the callee is not required to
-		/// check for null before using it, but it is a good idea to assert that
-		/// it is not null.)
+		/// <summary> 
+        ///     Returns the next token in the stream, or <c>null</c> at end-of-stream.
+        /// </summary>
+		/// <remarks>
+        ///     <para>
+        ///         The input Token should be used as the Token that is returned when possible, which will 
+        ///         give the fastest tokenization performance. However, this is not required. A new Token may be
+        ///         returned. Callers may re-use a single Token instance for successive calls
+        ///         to this method.
+        ///     </para>
+        ///     <para>
+		///         This implicitly defines a "contract" between consumers, the callers of this
+		///         method, and producers, the implementations of this method that are the source
+		///         for tokens:
+        ///     </para>
+		///     <ul>
+		///         <li>
+        ///             A consumer must fully consume the previously returned <see cref="Token" />
+		///             before calling this method again.
+        ///         </li>
+		///         <li>
+        ///             A producer must call <see cref="Token.Clear()"/> before setting the fields in
+		///             it and returning it.
+        ///         </li>
+		///     </ul>
+        ///     <para>
+		///         Also, the producer must make no assumptions about a <see cref="Token" /> after it
+		///         has been returned: the caller may arbitrarily change it. If the producer
+		///         needs to hold onto the <see cref="Token" /> for subsequent calls, it must clone()
+		///         it before storing it. Note that a <see cref="TokenFilter" /> is considered a
+		///         consumer.
+        ///     </para>
+        /// </remarks>
+		/// <param name="reusableToken">
+        ///     A <see cref="Token"/> that may or may not be used as the returned token;
+		///     this parameter should never be null. The callee is not required to
+		///     check for null before using it, but it is a good idea to assert that
+		///     it is not null.
 		/// </param>
-		/// <returns> next {@link Token} in the stream or null if end-of-stream was hit
+        /// <returns> 
+        ///     The next <see cref="Token"/> in the stream or <c>null</c> if the end-of-stream was hit.
 		/// </returns>
-		/// <deprecated> The new {@link #IncrementToken()} and {@link AttributeSource}
-		/// APIs should be used instead.
-		/// </deprecated>
         [Obsolete("The new IncrementToken() and AttributeSource APIs should be used instead.")]
 		public virtual Token Next(Token reusableToken)
 		{
@@ -445,17 +486,22 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		/// <summary> Returns the next {@link Token} in the stream, or null at EOS.
-		/// 
+		/// <summary> 
+        /// Returns the next <see cref="Token" /> in the stream, or null at EOS.
 		/// </summary>
-		/// <deprecated> The returned Token is a "full private copy" (not re-used across
-		/// calls to {@link #Next()}) but will be slower than calling
-		/// {@link #Next(Token)} or using the new {@link #IncrementToken()}
-		/// method with the new {@link AttributeSource} API.
-		/// </deprecated>
+		/// <remarks>
+        ///     <para>
+        ///         The returned Token is a "full private copy" (not re-used across
+		///         calls to <see cref="Next()" />) but will be slower than calling
+		///         <see cref="Next(Token)" /> or using the new <see cref="IncrementToken()" />
+		///         method with the new <see cref="AttributeSource" /> API.
+        ///     </para>
+        /// </remarks>
         [Obsolete("The returned Token is a \"full private copy\" (not re-used across calls to Next()) but will be slower than calling {@link #Next(Token)} or using the new IncrementToken() method with the new AttributeSource API.")]
 		public virtual Token Next()
 		{
+            #pragma warning disable 618
+
 			if (tokenWrapper == null)
 				throw new System.NotSupportedException("This TokenStream only supports the new Attributes API.");
 			
@@ -481,18 +527,27 @@ namespace Lucene.Net.Analysis
 					nextToken.SetPayload((Lucene.Net.Index.Payload) p.Clone());
 				}
 			}
-			return nextToken;
+			
+            return nextToken;
+
+            #pragma warning restore 618
 		}
 		
-		/// <summary> Resets this stream to the beginning. This is an optional operation, so
-		/// subclasses may or may not implement this method. {@link #Reset()} is not needed for
-		/// the standard indexing process. However, if the tokens of a
-		/// <code>TokenStream</code> are intended to be consumed more than once, it is
-		/// necessary to implement {@link #Reset()}. Note that if your TokenStream
-		/// caches tokens and feeds them back again after a reset, it is imperative
-		/// that you clone the tokens when you store them away (on the first pass) as
-		/// well as when you return them (on future passes after {@link #Reset()}).
-		/// </summary>
+		/// <summary>
+        /// Resets this stream to the beginning. This is an optional operation, so
+		/// subclasses may or may not implement this method. <see cref="Reset()" /> is not needed for
+		/// the standard indexing process.
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         However, if the tokens of a <c>TokenStream</c> are intended to be 
+        ///         consumed more than once, it is necessary to implement <see cref="Reset()" />. 
+        ///         Note that if your <c>TokenStream</c> caches tokens and feeds them back again
+        ///         after a reset, it is imperative that you clone the tokens when you 
+        ///         store them away on the first pass as well as when you return 
+        ///         them on future passes after <see cref="Reset()" />.
+        ///     </para>
+        /// </remarks>
 		public virtual void  Reset()
 		{
 		}

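
The six-step consumer workflow documented in the TokenStream remarks above can
be shown end to end. This is a hedged sketch against the 2.9-era attribute API
(the "body" field name and sample text are placeholders; AddAttribute(Type) and
TermAttribute.Term() are used as referenced elsewhere in this commit):

    using System;
    using System.IO;
    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    public static class TokenStreamConsumer
    {
        public static void Main()
        {
            // Step 1: instantiate the TokenStream.
            Analyzer analyzer = new SimpleAnalyzer();
            TokenStream stream =
                analyzer.TokenStream("body", new StringReader("Some Text To Tokenize"));

            // Step 2: reset the stream before consuming.
            stream.Reset();

            // Step 3: retrieve and locally cache the attributes up front.
            TermAttribute termAtt = (TermAttribute) stream.AddAttribute(typeof(TermAttribute));

            // Step 4: advance token by token, reading the cached attribute.
            while (stream.IncrementToken())
            {
                Console.WriteLine(termAtt.Term());
            }

            // Steps 5 and 6: perform end-of-stream work, then release resources.
            stream.End();
            stream.Close();
        }
    }
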
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs Sun Jul 24 01:21:27 2011
@@ -18,7 +18,7 @@
 using System;
 
 using Tokenizer = Lucene.Net.Analysis.Tokenizer;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs Sun Jul 24 01:21:27 2011
@@ -18,7 +18,7 @@
 using System;
 
 using Payload = Lucene.Net.Index.Payload;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj Sun Jul 24 01:21:27 2011
@@ -53,8 +53,7 @@
     <ConfigurationOverrideFile>
     </ConfigurationOverrideFile>
     <DefineConstants>TRACE;DEBUG</DefineConstants>
-    <DocumentationFile>
-    </DocumentationFile>
+    <DocumentationFile>..\..\bin\core\Debug\Lucene.Net.XML</DocumentationFile>
     <DebugSymbols>true</DebugSymbols>
     <FileAlignment>4096</FileAlignment>
     <NoStdLib>false</NoStdLib>

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs Sun Jul 24 01:21:27 2011
@@ -20,17 +20,71 @@ using System.Runtime.Serialization;
 
 namespace Lucene.Net.Store
 {
+
+    // JAVA: src/java/org/apache/lucene/store/AlreadyClosedException.java
 	
-	/// <summary> This exception is thrown when there is an attempt to
-	/// access something that has already been closed.
+	/// <summary> 
+    /// This exception is thrown when there is an attempt to access a resource 
+    /// that has already been closed.
 	/// </summary>
+    /// <remarks>
+    ///     <para>
+    ///         An example would be when a <see cref="Lucene.Net.Analysis.TokenStream"/> has already been closed. 
+    ///     </para>
+    /// </remarks>
 	[Serializable]
-	public class AlreadyClosedException:System.SystemException
+	public class AlreadyClosedException : System.SystemException
 	{
+        /// <summary>
+        /// Initializes a new instance of <see cref="AlreadyClosedException"/> with a message and <c>null</c> inner exception.
+        /// </summary>
+        /// <param name="message">
+        ///     A <c>String</c> that describes the error. The content of message is intended to be understood 
+        ///     by humans. The caller of this constructor is required to ensure that this string has been 
+        ///     localized for the current system culture. 
+        /// </param>
+        /// <remarks>
+        ///     <para>
+        ///         The constructor initializes the <see cref="System.Exception.Message"/> property of the new instance using message.
+        ///     </para>
+        /// </remarks>
 		public AlreadyClosedException(System.String message):base(message)
 		{
 		}
 
+        /// <summary>
+        /// Initializes a new instance of <see cref="AlreadyClosedException"/> with a message and inner exception.
+        /// </summary>
+        /// <param name="message">
+        ///     A <c>String</c> that describes the error. The content of message is intended to be understood 
+        ///     by humans. The caller of this constructor is required to ensure that this string has been 
+        ///     localized for the current system culture. 
+        /// </param>
+        /// <param name="innerException">
+        ///     The exception that is the cause of the current exception. If the <paramref name="innerException"/> parameter is not null, the 
+        ///     current exception is raised in a catch block that handles the inner exception. 
+        /// </param>
+        /// <remarks>
+        ///     <para>
+        ///         An exception that is thrown as a direct result of a previous exception should include a reference to the 
+        ///         previous exception in the <see cref="System.Exception.InnerException"/> property. The <see cref="System.Exception.InnerException"/> property 
+        ///         returns the same value that is passed into the constructor, or <c>null</c> if 
+        ///         the <see cref="System.Exception.InnerException"/> property does not supply the inner 
+        ///         exception value to the constructor.
+        ///     </para>
+        /// </remarks>
+        public AlreadyClosedException(string message, Exception innerException)
+            : base(message, innerException)
+        {
+
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="AlreadyClosedException"/> class with the specified serialization and context information.
+        /// </summary>
+        /// <param name="info">The data for serializing or deserializing the object. </param>
+        /// <param name="context">The source and destination for the object. </param>
+        // REFACTOR: add a build conditional so this constructor is only compiled for the client and full .NET Framework profiles.
         protected AlreadyClosedException(SerializationInfo info, StreamingContext context) : base(info, context)
         {
         }

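The new (message, innerException) overload shown above lets callers preserve the original failure instead of discarding it. A minimal, self-contained sketch (the InvalidOperationException is just a stand-in for whatever actually failed):

    using System;
    using Lucene.Net.Store;

    static class AlreadyClosedDemo
    {
        static void Main()
        {
            try
            {
                throw new InvalidOperationException("the underlying store was closed");
            }
            catch (InvalidOperationException inner)
            {
                // Wrap rather than swallow: the original failure stays reachable.
                AlreadyClosedException ex =
                    new AlreadyClosedException("this Analyzer is closed", inner);

                Console.WriteLine(ex.Message);                // this Analyzer is closed
                Console.WriteLine(ex.InnerException.Message); // the underlying store was closed
            }
        }
    }
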
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs Sun Jul 24 01:21:27 2011
@@ -19,9 +19,18 @@ using System;
 
 namespace Lucene.Net.Util
 {
+    // This interface might be better off as a base attribute class, to simplify the way the code
+    // queries for types that implement it.  Look at the AttributeSource.cs class. If there is a
+    // good reason to keep it as-is, please note that in the comments and remove this one.
 	
-	/// <summary> Base interface for attributes.</summary>
-	public interface Attribute
+    // JAVA: src/java/org/apache/lucene/util/Attribute.java
+
+	/// <summary> 
+    /// The contract interface for attributes.
+    /// This interface is used to query for the types that implement it and to
+    /// hold references to instances of those types.
+    /// </summary>
+	public interface IAttribute
 	{
 	}
 }
\ No newline at end of file

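For readers wondering what "query types" means in the comment above: AttributeSource (below) walks a type's interfaces and keeps only the ones that extend the marker. A sketch of that lookup, assuming TermAttributeImpl as the probe type:

    using System;
    using Lucene.Net.Util;
    using Lucene.Net.Analysis.Tokenattributes;

    static class MarkerQueryDemo
    {
        static void Main()
        {
            // The same filter AttributeSource applies: keep interfaces that
            // extend IAttribute, but skip the bare marker itself.
            foreach (Type curInterface in typeof(TermAttributeImpl).GetInterfaces())
            {
                if (curInterface != typeof(IAttribute) &&
                    typeof(IAttribute).IsAssignableFrom(curInterface))
                {
                    Console.WriteLine("attribute contract: " + curInterface.Name);
                }
            }
        }
    }
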
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs Sun Jul 24 01:21:27 2011
@@ -27,7 +27,7 @@ namespace Lucene.Net.Util
 	/// of usually streamed objects, e. g. a {@link Lucene.Net.Analysis.TokenStream}.
 	/// </summary>
 	[Serializable]
-	public abstract class AttributeImpl : System.ICloneable, Attribute
+	public abstract class AttributeImpl : System.ICloneable, IAttribute
 	{
 		/// <summary> Clears the values in this AttributeImpl and resets it to its 
 		/// default value. If this implementation implements more than one Attribute interface

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs Sun Jul 24 01:21:27 2011
@@ -302,7 +302,7 @@ namespace Lucene.Net.Util
                         for (int i = 0; i < interfaces.Length; i++)
                         {
                             System.Type curInterface = interfaces[i];
-                            if (curInterface != typeof(Attribute) && typeof(Attribute).IsAssignableFrom(curInterface))
+                            if (curInterface != typeof(IAttribute) && typeof(IAttribute).IsAssignableFrom(curInterface))
                             {
                                 foundInterfaces.Add(new WeakReference(curInterface));
                             }
@@ -338,11 +338,11 @@ namespace Lucene.Net.Util
         /// new instance is created, added to this AttributeSource and returned. 
         /// Signature for Java 1.5: <code>public &lt;T extends Attribute&gt; T addAttribute(Class&lt;T&gt;)</code>
         /// </summary>
-        public virtual Attribute AddAttribute(System.Type attClass)
+        public virtual IAttribute AddAttribute(System.Type attClass)
         {
             if (!attributes.ContainsKey(attClass))
             {
-                if (!(attClass.IsInterface && typeof(Attribute).IsAssignableFrom(attClass)))
+                if (!(attClass.IsInterface && typeof(IAttribute).IsAssignableFrom(attClass)))
                 {
                     throw new ArgumentException(
                         "AddAttribute() only accepts an interface that extends Attribute, but " +
@@ -387,7 +387,7 @@ namespace Lucene.Net.Util
         /// available. If you want to only use the attribute, if it is available (to optimize
         /// consuming), use {@link #hasAttribute}.
         /// </summary>
-        public virtual Attribute GetAttribute(System.Type attClass)
+        public virtual IAttribute GetAttribute(System.Type attClass)
         {
             if (!this.attributes.ContainsKey(attClass))
             {

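AddAttribute still returns the base contract after this change, so consumers cast to the concrete attribute interface, as the tests below do. A minimal end-to-end sketch:

    using System;
    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    static class AddAttributeDemo
    {
        static void Main()
        {
            TokenStream ts = new WhitespaceTokenizer(
                new System.IO.StringReader("how now brown cow"));

            // AddAttribute only accepts an interface that extends IAttribute;
            // passing an implementation type throws an ArgumentException.
            TermAttribute termAtt = (TermAttribute) ts.AddAttribute(typeof(TermAttribute));

            while (ts.IncrementToken())
                Console.WriteLine(termAtt.Term());

            ts.Close();
        }
    }
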
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs Sun Jul 24 01:21:27 2011
@@ -25,12 +25,13 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Analysis
 {
 	
-	/// <summary> Base class for all Lucene unit tests that use TokenStreams.  
-	/// <p/>
-	/// This class runs all tests twice, one time with {@link TokenStream#setOnlyUseNewAPI} <code>false</code>
-	/// and after that one time with <code>true</code>.
+	/// <summary> Base class for all Lucene.Net unit tests that use TokenStreams.  
+	/// <para>
+	/// This class runs every test twice: once with <see cref="TokenStream.SetOnlyUseNewAPI" /> set to <c>false</c>,
+	/// and once with it set to <c>true</c>.
+    /// </para>
 	/// </summary>
-	public abstract class BaseTokenStreamTestCase:LuceneTestCase
+	public abstract class BaseTokenStreamTestCase : LuceneTestCase
 	{
 		
 		private bool onlyUseNewAPI = false;
@@ -38,12 +39,14 @@ namespace Lucene.Net.Analysis
 		
 		public BaseTokenStreamTestCase():base()
 		{
-			this.testWithNewAPI = null; // run all tests also with onlyUseNewAPI
+            // run all tests also with onlyUseNewAPI
+			this.testWithNewAPI = null; 
 		}
 		
 		public BaseTokenStreamTestCase(System.String name):base(name)
 		{
-			this.testWithNewAPI = null; // run all tests also with onlyUseNewAPI
+            // run all tests also with onlyUseNewAPI
+			this.testWithNewAPI = null; 
 		}
 		
 		public BaseTokenStreamTestCase(System.Collections.Hashtable testWithNewAPI):base()
@@ -61,23 +64,29 @@ namespace Lucene.Net.Analysis
 		public override void  SetUp()
 		{
 			base.SetUp();
-			TokenStream.SetOnlyUseNewAPI(onlyUseNewAPI);
-		}
+
+            // needed for this test.
+            #pragma warning disable 618
+                TokenStream.SetOnlyUseNewAPI(onlyUseNewAPI);
+            #pragma warning restore 618
+        }
 		
 		// @Override
 		public override void  RunBare()
 		{
-			// Do the test with onlyUseNewAPI=false (default)
+			// Test with onlyUseNewAPI=false (default)
 			try
 			{
 				onlyUseNewAPI = false;
-				// base.RunBare();  // {{Aroush-2.9}}
+				
+                // TODO: Document why this call is commented out. 
+                // base.RunBare();  // {{Aroush-2.9}}
                 System.Diagnostics.Debug.Fail("Port issue:", "base.RunBare()"); // {{Aroush-2.9}}
 			}
-			catch (System.Exception e)
+			catch
 			{
-				System.Console.Out.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=false");
-				throw e;
+				Console.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=false");
+				throw;
 			}
 			
 			if (testWithNewAPI == null || testWithNewAPI.Contains(GetType()))
@@ -88,17 +97,17 @@ namespace Lucene.Net.Analysis
 					onlyUseNewAPI = true;
 					base.RunBare();
 				}
-				catch (System.Exception e)
+				catch
 				{
-					System.Console.Out.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=true");
-					throw e;
+					Console.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=true");
+                    throw;
 				}
 			}
 		}
 		
 		// some helpers to test Analyzers and TokenStreams:
 
-        public interface CheckClearAttributesAttribute : Lucene.Net.Util.Attribute
+        public interface CheckClearAttributesAttribute : Lucene.Net.Util.IAttribute
         {
                bool GetAndResetClearCalled();
         }
@@ -128,9 +137,14 @@ namespace Lucene.Net.Analysis
             //@Override
             public  override bool Equals(Object other) 
             {
+                if (other == null)
+                    throw new ArgumentNullException("other", "The argument 'other' must not be null.");
+
+                CheckClearAttributesAttributeImpl attributeImpl = other as CheckClearAttributesAttributeImpl;
+
                 return (
-                other is CheckClearAttributesAttributeImpl &&
-                ((CheckClearAttributesAttributeImpl) other).clearCalled == this.clearCalled
+                    attributeImpl != null &&
+                    attributeImpl.clearCalled == this.clearCalled
                 );
             }
 
@@ -165,6 +179,7 @@ namespace Lucene.Net.Analysis
             }
     
             TypeAttribute typeAtt = null;
+            
             if (types != null)
             {
                 Assert.IsTrue(ts.HasAttribute(typeof(TypeAttribute)), "has no TypeAttribute");
@@ -172,6 +187,7 @@ namespace Lucene.Net.Analysis
             }
             
             PositionIncrementAttribute posIncrAtt = null;
+
             if (posIncrements != null)
             {
                 Assert.IsTrue(ts.HasAttribute(typeof(PositionIncrementAttribute)), "has no PositionIncrementAttribute");
@@ -179,33 +195,49 @@ namespace Lucene.Net.Analysis
             }
 
             ts.Reset();
+            
             for (int i = 0; i < output.Length; i++)
             {
                 // extra safety to enforce, that the state is not preserved and also assign bogus values
                 ts.ClearAttributes();
                 termAtt.SetTermBuffer("bogusTerm");
-                if (offsetAtt != null) offsetAtt.SetOffset(14584724, 24683243);
-                if (typeAtt != null) typeAtt.SetType("bogusType");
-                if (posIncrAtt != null) posIncrAtt.SetPositionIncrement(45987657);
+                
+                if (offsetAtt != null) 
+                    offsetAtt.SetOffset(14584724, 24683243);
+                
+                if (typeAtt != null) 
+                    typeAtt.SetType("bogusType");
+                
+                if (posIncrAtt != null) 
+                    posIncrAtt.SetPositionIncrement(45987657);
 
                 checkClearAtt.GetAndResetClearCalled(); // reset it, because we called clearAttribute() before
+                
                 Assert.IsTrue(ts.IncrementToken(), "token " + i + " does not exist");
                 Assert.IsTrue(checkClearAtt.GetAndResetClearCalled(), "clearAttributes() was not called correctly in TokenStream chain");
 
                 Assert.AreEqual(output[i], termAtt.Term(), "term " + i);
+                
                 if (startOffsets != null)
                     Assert.AreEqual(startOffsets[i], offsetAtt.StartOffset(), "startOffset " + i);
+
                 if (endOffsets != null)
                     Assert.AreEqual(endOffsets[i], offsetAtt.EndOffset(), "endOffset " + i);
+                
                 if (types != null)
                     Assert.AreEqual(types[i], typeAtt.Type(), "type " + i);
+                
                 if (posIncrements != null)
                     Assert.AreEqual(posIncrements[i], posIncrAtt.GetPositionIncrement(), "posIncrement " + i);
             }
+            
             Assert.IsFalse(ts.IncrementToken(), "end of stream");
+
             ts.End();
+
             if (finalOffset.HasValue)
                 Assert.AreEqual(finalOffset, offsetAtt.EndOffset(), "finalOffset ");
+
             ts.Close();
         }
 

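The #pragma pair added to SetUp() above is the standard way to call an [Obsolete] member without tripping warning CS0618 anywhere else. A self-contained illustration:

    using System;

    static class ObsoleteCallDemo
    {
        [Obsolete("use the new API")]
        static void OldApi()
        {
            Console.WriteLine("old API called");
        }

        static void Main()
        {
            // CS0618 is suppressed only for this call site; any other use of
            // OldApi() still warns at compile time.
            #pragma warning disable 618
            OldApi();
            #pragma warning restore 618
        }
    }
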
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs Sun Jul 24 01:21:27 2011
@@ -24,160 +24,195 @@ using StandardTokenizer = Lucene.Net.Ana
 using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Payload = Lucene.Net.Index.Payload;
+using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis
 {
-	
-	[TestFixture]
-	public class TestAnalyzers:BaseTokenStreamTestCase
-	{
-		
-		/*public TestAnalyzers(System.String name):base(name)
-		{
-		}*/
-		
-		[Test]
-		public virtual void  TestSimple()
-		{
-			Analyzer a = new SimpleAnalyzer();
-			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "U.S.A.", new System.String[]{"u", "s", "a"});
-			AssertAnalyzesTo(a, "C++", new System.String[]{"c"});
-			AssertAnalyzesTo(a, "B2B", new System.String[]{"b", "b"});
-			AssertAnalyzesTo(a, "2B", new System.String[]{"b"});
-			AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[]{"quoted", "word"});
-		}
-		
-		[Test]
-		public virtual void  TestNull()
-		{
-			Analyzer a = new WhitespaceAnalyzer();
-			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "FOO", "BAR"});
-			AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[]{"foo", "bar", ".", "FOO", "<>", "BAR"});
-			AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[]{"foo.bar.FOO.BAR"});
-			AssertAnalyzesTo(a, "U.S.A.", new System.String[]{"U.S.A."});
-			AssertAnalyzesTo(a, "C++", new System.String[]{"C++"});
-			AssertAnalyzesTo(a, "B2B", new System.String[]{"B2B"});
-			AssertAnalyzesTo(a, "2B", new System.String[]{"2B"});
-			AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[]{"\"QUOTED\"", "word"});
-		}
-		
-		[Test]
-		public virtual void  TestStop()
-		{
-			Analyzer a = new StopAnalyzer();
-			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "foo a bar such FOO THESE BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-		}
-		
-		internal virtual void  VerifyPayload(TokenStream ts)
-		{
-			PayloadAttribute payloadAtt = (PayloadAttribute) ts.GetAttribute(typeof(PayloadAttribute));
-			for (byte b = 1; ; b++)
-			{
-				bool hasNext = ts.IncrementToken();
-				if (!hasNext)
-					break;
-				// System.out.println("id="+System.identityHashCode(nextToken) + " " + t);
-				// System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
-				Assert.AreEqual(b, payloadAtt.GetPayload().ToByteArray()[0]);
-			}
-		}
-		
-		// Make sure old style next() calls result in a new copy of payloads
-		[Test]
-		public virtual void  TestPayloadCopy()
-		{
-			System.String s = "how now brown cow";
-			TokenStream ts;
-			ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
-			ts = new PayloadSetter(ts);
-			VerifyPayload(ts);
-			
-			ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
-			ts = new PayloadSetter(ts);
-			VerifyPayload(ts);
-		}
-		
-		// LUCENE-1150: Just a compile time test, to ensure the
-		// StandardAnalyzer constants remain publicly accessible
-		public virtual void  _testStandardConstants()
-		{
-			int x = StandardTokenizer.ALPHANUM;
-			x = StandardTokenizer.APOSTROPHE;
-			x = StandardTokenizer.ACRONYM;
-			x = StandardTokenizer.COMPANY;
-			x = StandardTokenizer.EMAIL;
-			x = StandardTokenizer.HOST;
-			x = StandardTokenizer.NUM;
-			x = StandardTokenizer.CJ;
-			System.String[] y = StandardTokenizer.TOKEN_TYPES;
-		}
-		
-		private class MyStandardAnalyzer:StandardAnalyzer
-		{
-			public override TokenStream TokenStream(System.String field, System.IO.TextReader reader)
-			{
-				return new WhitespaceAnalyzer().TokenStream(field, reader);
-			}
-		}
-		
-		[Test]
-		public virtual void  TestSubclassOverridingOnlyTokenStream()
-		{
-			Analyzer a = new MyStandardAnalyzer();
-			TokenStream ts = a.ReusableTokenStream("field", new System.IO.StringReader("the"));
-			// StandardAnalyzer will discard "the" (it's a
-			// stopword), by my subclass will not:
-			Assert.IsTrue(ts.IncrementToken());
-			Assert.IsFalse(ts.IncrementToken());
-		}
+
+    [TestFixture]
+    [Category(Categories.Unit)]
+    public class TestAnalyzers : BaseTokenStreamTestCase
+    {
+
+        /*public TestAnalyzers(System.String name):base(name)
+        {
+        }*/
+
+        [Test]
+        public virtual void TestSimple()
+        {
+            Analyzer a = new SimpleAnalyzer();
+            AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "U.S.A.", new System.String[] { "u", "s", "a" });
+            AssertAnalyzesTo(a, "C++", new System.String[] { "c" });
+            AssertAnalyzesTo(a, "B2B", new System.String[] { "b", "b" });
+            AssertAnalyzesTo(a, "2B", new System.String[] { "b" });
+            AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[] { "quoted", "word" });
+        }
+
+        [Test]
+        public virtual void TestNull()
+        {
+            Analyzer a = new WhitespaceAnalyzer();
+            AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[] { "foo", "bar", "FOO", "BAR" });
+            AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[] { "foo", "bar", ".", "FOO", "<>", "BAR" });
+            AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[] { "foo.bar.FOO.BAR" });
+            AssertAnalyzesTo(a, "U.S.A.", new System.String[] { "U.S.A." });
+            AssertAnalyzesTo(a, "C++", new System.String[] { "C++" });
+            AssertAnalyzesTo(a, "B2B", new System.String[] { "B2B" });
+            AssertAnalyzesTo(a, "2B", new System.String[] { "2B" });
+            AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[] { "\"QUOTED\"", "word" });
+        }
+
+        [Test]
+        public virtual void TestStop()
+        {
+            Analyzer a = new StopAnalyzer(_TestUtil.CurrentVersion);
+            AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "foo a bar such FOO THESE BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+        }
+
+        // Make sure old style next() calls result in a new copy of payloads
+        [Test]
+        public virtual void TestPayloadCopy()
+        {
+            System.String s = "how now brown cow";
+            TokenStream ts;
+
+            ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
+            ts = new PayloadSetter(ts);
+            VerifyPayload(ts);
+
+            ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
+            ts = new PayloadSetter(ts);
+
+            VerifyPayload(ts);
+        }
+
+        // LUCENE-1150: ensure that the StandardTokenizer constants remain
+        // publicly accessible and keep their expected values
+        [Test]
+        public virtual void StandardConstants()
+        {
+            Assert.AreEqual(0, StandardTokenizer.ALPHANUM);
+            Assert.AreEqual(1, StandardTokenizer.APOSTROPHE);
+            Assert.AreEqual(2, StandardTokenizer.ACRONYM);
+            Assert.AreEqual(3, StandardTokenizer.COMPANY);
+            Assert.AreEqual(4, StandardTokenizer.EMAIL);
+            Assert.AreEqual(5, StandardTokenizer.HOST);
+            Assert.AreEqual(6, StandardTokenizer.NUM);
+            Assert.AreEqual(7, StandardTokenizer.CJ);
+            
+            string[] tokenTypes = new string[]{
+                "<ALPHANUM>", 
+                "<APOSTROPHE>", 
+                "<ACRONYM>", 
+                "<COMPANY>", 
+                "<EMAIL>", 
+                "<HOST>", 
+                "<NUM>", 
+                "<CJ>", 
+                "<ACRONYM_DEP>"
+            };
+
+            Assert.AreEqual(tokenTypes, StandardTokenizer.TOKEN_TYPES);
+        }
+
+       
+
+        [Test]
+        public virtual void TestSubclassOverridingOnlyTokenStream()
+        {
+            Analyzer a = new MyStandardAnalyzer();
+            TokenStream ts = a.ReusableTokenStream("field", new System.IO.StringReader("the"));
+            
+            // StandardAnalyzer will discard "the" (it's a
+            // stopword), but my subclass will not:
+            Assert.IsTrue(ts.IncrementToken());
+            Assert.IsFalse(ts.IncrementToken());
+        }
 
         [Test]
         public void Test_LUCENE_3042_LUCENENET_433()
         {
             String testString = "t";
 
-            Analyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer();
+            Analyzer analyzer = new StandardAnalyzer(_TestUtil.CurrentVersion);
+
             TokenStream stream = analyzer.ReusableTokenStream("dummy", new System.IO.StringReader(testString));
             stream.Reset();
+            
             while (stream.IncrementToken())
             {
                 // consume
             }
+
             stream.End();
             stream.Close();
 
             AssertAnalyzesToReuse(analyzer, testString, new String[] { "t" });
         }
-	}
-	
-	class PayloadSetter:TokenFilter
-	{
-		private void  InitBlock()
-		{
-			p = new Payload(data, 0, 1);
-		}
-		internal PayloadAttribute payloadAtt;
-		public PayloadSetter(TokenStream input):base(input)
-		{
-			InitBlock();
-			payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
-		}
-		
-		internal byte[] data = new byte[1];
-		internal Payload p;
-		
-		public override bool IncrementToken()
-		{
-			bool hasNext = input.IncrementToken();
-			if (!hasNext)
-				return false;
-			payloadAtt.SetPayload(p); // reuse the payload / byte[]
-			data[0]++;
-			return true;
-		}
-	}
+
+        #region helpers
+
+        internal virtual void VerifyPayload(TokenStream ts)
+        {
+            PayloadAttribute payloadAtt = (PayloadAttribute)ts.GetAttribute(typeof(PayloadAttribute));
+            for (byte b = 1; ; b++)
+            {
+                bool hasNext = ts.IncrementToken();
+                if (!hasNext)
+                    break;
+                // System.out.println("id="+System.identityHashCode(nextToken) + " " + t);
+                // System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
+                Assert.AreEqual(b, payloadAtt.GetPayload().ToByteArray()[0]);
+            }
+        }
+
+        class MyStandardAnalyzer : StandardAnalyzer
+        {
+            public MyStandardAnalyzer()
+                : base(_TestUtil.CurrentVersion)
+            {
+
+            }
+
+            public override TokenStream TokenStream(System.String field, System.IO.TextReader reader)
+            {
+                return new WhitespaceAnalyzer().TokenStream(field, reader);
+            }
+        }
+
+        class PayloadSetter : TokenFilter
+        {
+            private void InitBlock()
+            {
+                p = new Payload(data, 0, 1);
+            }
+            internal PayloadAttribute payloadAtt;
+            public PayloadSetter(TokenStream input)
+                : base(input)
+            {
+                InitBlock();
+                payloadAtt = (PayloadAttribute)AddAttribute(typeof(PayloadAttribute));
+            }
+
+            internal byte[] data = new byte[1];
+            internal Payload p;
+
+            public override bool IncrementToken()
+            {
+                bool hasNext = input.IncrementToken();
+                if (!hasNext)
+                    return false;
+                payloadAtt.SetPayload(p); // reuse the payload / byte[]
+                data[0]++;
+                return true;
+            }
+        }
+
+        #endregion
+    }
 }
\ No newline at end of file

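The [Category(Categories.Unit)] attribute used above relies on the new test/core/Categories.cs added by this commit; the file itself is not shown in this part [1/2] of the diff. A hedged guess at its shape (the namespace and any members beyond Unit are assumptions):

    namespace Lucene.Net
    {
        // NUnit's CategoryAttribute takes a string, so the category names
        // are compile-time string constants.
        public static class Categories
        {
            public const string Unit = "Unit";
        }
    }
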
Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs Sun Jul 24 01:21:27 2011
@@ -21,7 +21,7 @@ using NUnit.Framework;
 
 using Lucene.Net.Analysis.Tokenattributes;
 using Payload = Lucene.Net.Index.Payload;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 



RE: [Lucene.Net] svn commit: r1150245 [1/2] - in /incubator/lucene.net/branches/Lucene.Net_2_9_4g: ./ src/core/ src/core/Analysis/ src/core/Analysis/Tokenattributes/ src/core/Store/ src/core/Util/ test/core/ test/core/Analysis/ test/core/Analysis/Tokenatt

Posted by Digy <di...@gmail.com>.
Hi Michael,

Very good work, thank you.
But I must also say that some of the changes to comments remove all the reference points used when applying a Java patch to Lucene.Net.
The line numbers in Java patches already do not match Lucene.Net, so sometimes I just use a string in a comment or in the code to locate the correct position.
Adding a new line is not a problem, but removing or changing a string, or splitting it into two lines, may cause problems in future ports (unless, of course, an automated way is found).
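For example, something as small as this is enough to find the spot again (the file path and anchor line here are made up):

    using System;
    using System.IO;

    static class PatchAnchorDemo
    {
        static void Main()
        {
            // A context string copied from the Java patch hunk, used instead
            // of the (already wrong) line numbers.
            string path = "src/core/Analysis/Analyzer.cs";
            string anchor = "protected internal bool overridesTokenStreamMethod;";

            string[] lines = File.ReadAllLines(path);
            for (int i = 0; i < lines.Length; i++)
            {
                if (lines[i].Contains(anchor))
                    Console.WriteLine("anchor found at line " + (i + 1));
            }
        }
    }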

This may not be a perfect example, but just compare the patches for LUCENENET-427 (https://issues.apache.org/jira/browse/LUCENENET-427):
https://issues.apache.org/jira/secure/attachment/12483888/LUCENE-3234.patch
https://issues.apache.org/jira/secure/attachment/12483912/FastVectorHighlighter.patch

DIGY



-----Original Message-----
From: mherndon@apache.org [mailto:mherndon@apache.org] 
Sent: Sunday, July 24, 2011 4:22 AM
To: lucene-net-commits@lucene.apache.org
Subject: [Lucene.Net] svn commit: r1150245 [1/2] - in /incubator/lucene.net/branches/Lucene.Net_2_9_4g: ./ src/core/ src/core/Analysis/ src/core/Analysis/Tokenattributes/ src/core/Store/ src/core/Util/ test/core/ test/core/Analysis/ test/core/Analysis/Tokenattribu

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs%20FROM%202.9.4.txt?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/DIFFs FROM 2.9.4.txt Sun Jul 24 01:21:27 2011
@@ -23,6 +23,11 @@
   
 * Filter.Bits(Obsolete) removed.
 
+* LUCENENET-438 JavaDoc are turned into XML Doc Comments
+* LUCENENET-439 fix exception re-throws
+* LUCENENET-435 fix up the test-suite for Lucene.Net Core lib. 
+* LUCENENET-436 fix up deprecated code. 
+
 -------------------------------------------------------------------------------
 
 A few API changes like:

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Analyzer.cs Sun Jul 24 01:21:27 2011
@@ -20,148 +20,215 @@ using System;
 using Fieldable = Lucene.Net.Documents.Fieldable;
 using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
 using Lucene.Net.Util;
+using System.Reflection;
+using System.IO;
 
 namespace Lucene.Net.Analysis
 {
+    // JAVA: src/java/org/apache/lucene/analysis/Analyzer.java
 	
-	/// <summary>An Analyzer builds TokenStreams, which analyze text.  It thus represents a
-	/// policy for extracting index terms from text.
-	/// <p/>
-	/// Typical implementations first build a Tokenizer, which breaks the stream of
-	/// characters from the Reader into raw Tokens.  One or more TokenFilters may
-	/// then be applied to the output of the Tokenizer.
+	/// <summary>
+    ///     An <see cref="Analyzer"/> represents a policy for extracting terms that are 
+    ///     indexed from text. The <see cref="Analyzer"/> builds <see cref="TokenStream"/>s, which 
+    ///     breaks down text into tokens. 
 	/// </summary>
+    /// <remarks>
+    ///     <para>
+    ///         A typical <see cref="Analyzer"/> implementation will first build a <see cref="Tokenizer"/>.
+    ///         The <see cref="Tokenizer"/> will break down the stream of characters from the 
+    ///         <see cref="System.IO.TextReader"/> into raw <see cref="Token"/>s.  One or 
+    ///         more <see cref="TokenFilter"/>s may then be applied to the output of the <see cref="Tokenizer"/>.
+    ///     </para>
+    /// </remarks>
+    // REFACTOR: determine if this class should use IDisposable since it has a Close() method.
 	public abstract class Analyzer
 	{
-		/// <summary>Creates a TokenStream which tokenizes all the text in the provided
-		/// Reader.  Must be able to handle null field name for
-		/// backward compatibility.
-		/// </summary>
+        private CloseableThreadLocal<object> tokenStreams = new CloseableThreadLocal<object>();
+
+        /// <summary>
+        /// Gets or sets whether this class overrides the <see cref="TokenStream(String, TextReader)"/> method. 
+        /// </summary>
+        protected internal bool overridesTokenStreamMethod;
+
+		/// <summary>
+        /// Creates a <see cref="TokenStream"/> which tokenizes all the text in 
+        /// the provided <see cref="TextReader"/>.
+        /// </summary>
+		/// <param name="fieldName">The name of the <see cref="Lucene.Net.Documents.Field"/>. the fieldName can be <c>null</c>.</param>
+		/// <param name="reader">The text reader.</param>
+		/// <returns>A <see cref="TokenStream"/>.</returns>
 		public abstract TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader);
 		
-		/// <summary>Creates a TokenStream that is allowed to be re-used
-		/// from the previous time that the same thread called
-		/// this method.  Callers that do not need to use more
-		/// than one TokenStream at the same time from this
-		/// analyzer should use this method for better
-		/// performance.
-		/// </summary>
-		public virtual TokenStream ReusableTokenStream(System.String fieldName, System.IO.TextReader reader)
+		/// <summary>
+        ///     Creates a re-useable previously saved <see cref="TokenStream"/> inside the
+        ///     same thread that called this method. Callers that do not need to use more
+		///     than one TokenStream at the same time from this analyzer should use this 
+        ///     method for better performance.
+		/// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         This method defaults to invoking <see cref="TokenStream(String, TextReader)" />
+        ///     </para>
+        /// </remarks>
+		public virtual TokenStream ReusableTokenStream(String fieldName, TextReader reader)
 		{
 			return TokenStream(fieldName, reader);
 		}
 		
-		private CloseableThreadLocal<object> tokenStreams = new CloseableThreadLocal<object>();
 		
-		/// <summary>Used by Analyzers that implement reusableTokenStream
-		/// to retrieve previously saved TokenStreams for re-use
-		/// by the same thread. 
-		/// </summary>
+		
+		/// <summary>
+        /// Gets the previous <see cref="TokenStream"/> used by Analyzers that implement (overrides) 
+        /// <see cref="Analyzer.ReusableTokenStream(String, TextReader)"/> to retrieve a 
+        /// previously saved <see cref="TokenStream"/> for re-use by the same thread. 
+		/// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         This method uses a <see cref="CloseableThreadLocal{T}"/> to store the previous thread and retrieve it.
+        ///     </para>
+        /// </remarks>
+        /// <exception cref="AlreadyClosedException">Throws when there is a null reference exception and the analyzer is closed.</exception>
+        /// <exception cref="System.NullReferenceException">
+        ///     Throws when there is a null reference to <see cref="CloseableThreadLocal{T}"/> and the
+        ///     analyzer is still open.
+        /// </exception>
+        // REFACTOR: turn into a property.
 		protected internal virtual System.Object GetPreviousTokenStream()
 		{
 			try
 			{
 				return tokenStreams.Get();
 			}
-			catch (System.NullReferenceException npe)
+			catch (System.NullReferenceException ex)
 			{
+                // GLOBALIZATION: get exception message from resource file.
 				if (tokenStreams == null)
-				{
-					throw new AlreadyClosedException("this Analyzer is closed");
-				}
-				else
-				{
-					throw npe;
-				}
+					throw new AlreadyClosedException("this Analyzer is closed", ex);
+
+                // default to re-throw keep stack trace intact.
+				throw;
+				
 			}
 		}
 		
-		/// <summary>Used by Analyzers that implement reusableTokenStream
-		/// to save a TokenStream for later re-use by the same
-		/// thread. 
-		/// </summary>
+		/// <summary>
+        ///     Sets the <see cref="TokenStream"/> used by Analyzers that implement (overrides) 
+        ///     <see cref="Analyzer.ReusableTokenStream(String, TextReader)"/>
+        ///     to save a <see cref="TokenStream" /> for later re-use by the same thread. 
+        /// </summary>
+		/// <param name="obj">The previous <see cref="TokenStream"/>.</param>
 		protected internal virtual void  SetPreviousTokenStream(System.Object obj)
 		{
 			try
 			{
 				tokenStreams.Set(obj);
 			}
-			catch (System.NullReferenceException npe)
+			catch (System.NullReferenceException ex)
 			{
+                // GLOBALIZATION: get exception message from resource file.
 				if (tokenStreams == null)
-				{
-					throw new AlreadyClosedException("this Analyzer is closed");
-				}
-				else
-				{
-					throw npe;
-				}
+					throw new AlreadyClosedException("this Analyzer is closed", ex);
+
+                // default to re-throw keep stack trace intact.
+                throw;
+				
 			}
 		}
 		
-		protected internal bool overridesTokenStreamMethod;
-		
-		/// <deprecated> This is only present to preserve
-		/// back-compat of classes that subclass a core analyzer
-		/// and override tokenStream but not reusableTokenStream 
-		/// </deprecated>
-        [Obsolete("This is only present to preserve back-compat of classes that subclass a core analyzer and override tokenStream but not reusableTokenStream ")]
+       
+		
+	    /// <summary>
+        /// This is only present to preserve
+        /// back-compat of classes that subclass a core analyzer
+        /// and override tokenStream but not reusableTokenStream.
+	    /// </summary>
+	    /// <param name="baseClass">The base class type.</param>
+        [Obsolete("This is only present to preserve backwards compatibility of classes that subclass a core analyzer and override tokenStream but not reusableTokenStream ")]
 		protected internal virtual void  SetOverridesTokenStreamMethod(System.Type baseClass)
 		{
-			
-			System.Type[] params_Renamed = new System.Type[2];
-			params_Renamed[0] = typeof(System.String);
-			params_Renamed[1] = typeof(System.IO.TextReader);
-			
+
+            Type[] paramsRenamed = new Type[] { typeof(String), typeof(TextReader) };
+
 			try
 			{
-				System.Reflection.MethodInfo m = this.GetType().GetMethod("TokenStream", (params_Renamed == null)?new System.Type[0]:(System.Type[]) params_Renamed);
-				if (m != null)
-				{
-					overridesTokenStreamMethod = m.DeclaringType != baseClass;
-				}
-				else
-				{
-					overridesTokenStreamMethod = false;
-				}
+                Type[] types = paramsRenamed ?? new Type[0];
+
+				MethodInfo method = this.GetType().GetMethod("TokenStream", types);
+
+                overridesTokenStreamMethod = (method != null && method.DeclaringType != baseClass);
 			}
-			catch (System.MethodAccessException nsme)
+			catch
 			{
 				overridesTokenStreamMethod = false;
 			}
 		}
 		
 		
-		/// <summary> Invoked before indexing a Fieldable instance if
-		/// terms have already been added to that field.  This allows custom
-		/// analyzers to place an automatic position increment gap between
-		/// Fieldable instances using the same field name.  The default value
-		/// position increment gap is 0.  With a 0 position increment gap and
-		/// the typical default token position increment of 1, all terms in a field,
-		/// including across Fieldable instances, are in successive positions, allowing
-		/// exact PhraseQuery matches, for instance, across Fieldable instance boundaries.
-		/// 
-		/// </summary>
-		/// <param name="fieldName">Fieldable name being indexed.
-		/// </param>
-		/// <returns> position increment gap, added to the next token emitted from {@link #TokenStream(String,Reader)}
+		/// <summary> 
+        ///     Gets the position of the increment gap between two 
+        ///     <see cref="Lucene.Net.Documents.Field"/>s using the same name. This 
+        ///     is called before indexing a <see cref="Fieldable"/> instance if terms 
+        ///     have already been added to that field. 
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///     Specifying the position of the increment gap allows custom
+        ///     <see cref="Analyzer"/>s to place an automatic position increment gap between
+        ///     <see cref="Fieldable"/> instances using the same field name. 
+        ///     </para>
+        ///     <para>
+        ///         The default value position increment gap is 0.  
+        ///     </para>
+        ///     <para>
+        ///         <b>Position Increment Gap</b> - The value that controls the 
+        ///         virtual space between the last <see cref="Token"/> of one <see cref="Field"/> 
+        ///         instance and the first <see cref="Token"/> of the next instance. 
+        ///         Both fields share the same name. 
+        ///     </para>
+        ///     <para>
+        ///         Suppose a document has a multi-valued "author" field. Like this:
+        ///     </para>
+        ///     <ul>
+        ///         <li>author: John Doe</li>
+        ///         <li>author: Bob Smith</li>
+        ///     </ul>
+        ///     <para>
+        ///         With a position increment gap of 0, a phrase query of "doe bob" would
+        ///         be a match.  With a gap of 100, a phrase query of "doe bob" would not
+        ///         match.  The gap of 100 would prevent the phrase queries from matching
+        ///         even with a modest slop factor. 
+        ///     </para>
+        ///     <note>
+        ///         This explanation of the position increment gap was pulled from an entry by Erik Hatcher on the 
+        ///         <a href="http://mail-archives.apache.org/mod_mbox/lucene-solr-user/200810.mbox/%3C045DC0D3-789D-433E-88B9-9252392BB1D6@ehatchersolutions.com%3E">
+        ///         lucene-solr-user list</a>. 
+        ///         This was a better explanation than the one found in the code comments from the Lucene-Solr project.
+        ///     </note>
+		/// </remarks>
+		/// <param name="fieldName">The name of the field being indexed. </param>
+		/// <returns> 
+        ///     The position of the increment gap added to the next token emitted 
+        ///     from <see cref="TokenStream(String,TextReader)" />
 		/// </returns>
 		public virtual int GetPositionIncrementGap(System.String fieldName)
 		{
 			return 0;
 		}
 		
-		/// <summary> Just like {@link #getPositionIncrementGap}, except for
-		/// Token offsets instead.  By default this returns 1 for
-		/// tokenized fields and, as if the fields were joined
-		/// with an extra space character, and 0 for un-tokenized
-		/// fields.  This method is only called if the field
-		/// produced at least one token for indexing.
-		/// 
-		/// </summary>
-		/// <param name="field">the field just indexed
-		/// </param>
-		/// <returns> offset gap, added to the next token emitted from {@link #TokenStream(String,Reader)}
+		/// <summary> 
+        ///     Gets the offset gap for a token in the specified field. By default this method
+        ///     returns 1 for tokenized fields and 0 if the field is untokenized.
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         This method is similar to <see cref="GetPositionIncrementGap(String)"/>
+        ///         and is only called if the field produced at least one token for indexing.
+        ///     </para>
+        /// </remarks>
+		/// <param name="field">the field that was just analyzed </param>
+		/// <returns> 
+        ///     The offset gap, added to the next token emitted 
+        ///     from <see cref="TokenStream(String,TextReader)" />.
 		/// </returns>
 		public virtual int GetOffsetGap(Fieldable field)
 		{
@@ -171,7 +238,15 @@ namespace Lucene.Net.Analysis
 				return 0;
 		}
 		
-		/// <summary>Frees persistent resources used by this Analyzer </summary>
+		/// <summary>   
+        ///     Frees persistent resources used by the <see cref="Analyzer"/>.
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         The default implementation closes the internal <see cref="TokenStream"/>s 
+        ///         used by the analyzer.
+        ///     </para>
+        /// </remarks>
 		public virtual void  Close()
 		{
 			tokenStreams.Close();

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/SimpleAnalyzer.cs Sun Jul 24 01:21:27 2011
@@ -20,19 +20,33 @@ using System;
 namespace Lucene.Net.Analysis
 {
 	
-	/// <summary>An {@link Analyzer} that filters {@link LetterTokenizer} 
-	/// with {@link LowerCaseFilter} 
+	/// <summary>An <see cref="Analyzer"/> that filters <see cref="LetterTokenizer"/>
+	/// with <see cref="LowerCaseFilter"/>
 	/// </summary>
 	
-	public sealed class SimpleAnalyzer:Analyzer
+	public sealed class SimpleAnalyzer : Analyzer
 	{
+
+        /// <summary>
+        /// 
+        /// </summary>
+        /// <param name="fieldName"></param>
+        /// <param name="reader"></param>
+        /// <returns><see cref="LowerCaseTokenizer"/></returns>
 		public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 		{
 			return new LowerCaseTokenizer(reader);
 		}
 		
+        /// <summary>
+        /// 
+        /// </summary>
+        /// <param name="fieldName"></param>
+        /// <param name="reader"></param>
+        /// <returns></returns>
 		public override TokenStream ReusableTokenStream(System.String fieldName, System.IO.TextReader reader)
 		{
+            
 			Tokenizer tokenizer = (Tokenizer) GetPreviousTokenStream();
 			if (tokenizer == null)
 			{

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Token.cs Sun Jul 24 01:21:27 2011
@@ -26,7 +26,7 @@ using TypeAttribute = Lucene.Net.Analysi
 using Payload = Lucene.Net.Index.Payload;
 using TermPositions = Lucene.Net.Index.TermPositions;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 
 namespace Lucene.Net.Analysis

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/TokenStream.cs Sun Jul 24 01:21:27 2011
@@ -26,91 +26,94 @@ using TypeAttribute = Lucene.Net.Analysi
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 using AttributeSource = Lucene.Net.Util.AttributeSource;
+using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis
 {
+    // JAVA: src/java/org/apache/lucene/analysis/TokenStream.java
 	
-	/// <summary> A <code>TokenStream</code> enumerates the sequence of tokens, either from
-	/// {@link Field}s of a {@link Document} or from query text.
-	/// <p/>
-	/// This is an abstract class. Concrete subclasses are:
-	/// <ul>
-	/// <li>{@link Tokenizer}, a <code>TokenStream</code> whose input is a Reader; and</li>
-	/// <li>{@link TokenFilter}, a <code>TokenStream</code> whose input is another
-	/// <code>TokenStream</code>.</li>
-	/// </ul>
-	/// A new <code>TokenStream</code> API has been introduced with Lucene 2.9. This API
-	/// has moved from being {@link Token} based to {@link Attribute} based. While
-	/// {@link Token} still exists in 2.9 as a convenience class, the preferred way
-	/// to store the information of a {@link Token} is to use {@link AttributeImpl}s.
-	/// <p/>
-	/// <code>TokenStream</code> now extends {@link AttributeSource}, which provides
-	/// access to all of the token {@link Attribute}s for the <code>TokenStream</code>.
-	/// Note that only one instance per {@link AttributeImpl} is created and reused
-	/// for every token. This approach reduces object creation and allows local
-	/// caching of references to the {@link AttributeImpl}s. See
-	/// {@link #IncrementToken()} for further details.
-	/// <p/>
-	/// <b>The workflow of the new <code>TokenStream</code> API is as follows:</b>
-	/// <ol>
-	/// <li>Instantiation of <code>TokenStream</code>/{@link TokenFilter}s which add/get
-	/// attributes to/from the {@link AttributeSource}.</li>
-	/// <li>The consumer calls {@link TokenStream#Reset()}.</li>
-	/// <li>The consumer retrieves attributes from the stream and stores local
-	/// references to all attributes it wants to access</li>
-	/// <li>The consumer calls {@link #IncrementToken()} until it returns false and
-	/// consumes the attributes after each call.</li>
-	/// <li>The consumer calls {@link #End()} so that any end-of-stream operations
-	/// can be performed.</li>
-	/// <li>The consumer calls {@link #Close()} to release any resource when finished
-	/// using the <code>TokenStream</code></li>
-	/// </ol>
-	/// To make sure that filters and consumers know which attributes are available,
-	/// the attributes must be added during instantiation. Filters and consumers are
-	/// not required to check for availability of attributes in
-	/// {@link #IncrementToken()}.
-	/// <p/>
-	/// You can find some example code for the new API in the analysis package level
-	/// Javadoc.
-	/// <p/>
-	/// Sometimes it is desirable to capture a current state of a <code>TokenStream</code>
-	/// , e. g. for buffering purposes (see {@link CachingTokenFilter},
-	/// {@link TeeSinkTokenFilter}). For this usecase
-	/// {@link AttributeSource#CaptureState} and {@link AttributeSource#RestoreState}
-	/// can be used.
-	/// </summary>
-	public abstract class TokenStream:AttributeSource
+	/// <summary> 
+    ///     A <see cref="Lucene.Net.Analysis.TokenStream"/> enumerates the sequence of tokens, either from
+    ///     <see cref="Lucene.Net.Documents.Field"/>s of a <see cref="Lucene.Net.Documents.Document"/> 
+    ///     or from query text.
+    /// </summary>
+    /// <remarks>
+    ///     <para>
+    ///         A new <see cref="Lucene.Net.Analysis.TokenStream"/> API has been introduced with Lucene 2.9. This API
+    ///         has moved from being <see cref="Lucene.Net.Analysis.Token"/> based to <see cref="Lucene.Net.Util.IAttribute" /> based. While
+    ///         <see cref="Lucene.Net.Analysis.Token"/> still exists in 2.9 as a convenience class, the preferred way
+    ///         to store the information of a <see cref="Lucene.Net.Analysis.Token"/> is to use <see cref="Lucene.Net.Util.AttributeImpl" />s.
+    ///     </para>
+	///     <para>
+    ///         <c>TokenStream</c> now extends <see cref="Lucene.Net.Util.AttributeSource" />, which provides
+    ///         access to all of the token <see cref="Lucene.Net.Util.IAttribute"/>s for the <c>TokenStream</c>.
+    ///         Note that only one instance per <see cref="Lucene.Net.Util.AttributeImpl" /> is created and reused
+	///         for every token. This approach reduces object creation and allows local
+    ///         caching of references to the <see cref="Lucene.Net.Util.AttributeImpl" />s. See
+	///         <see cref="IncrementToken"/> for further details.
+    ///     </para>
+	///     <para>
+    ///         <b>The workflow of the new <c>TokenStream</c> API is as follows:</b>
+    ///     </para>
+	///     <ol>
+	///         <li>
+    ///             Instantiation of <see cref="TokenStream" /> / <see cref="TokenFilter"/>s which add/get
+	///             attributes to/from the <see cref="Lucene.Net.Util.AttributeSource"/>.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="Reset()"/>.
+    ///         </li>
+	///         <li>
+    ///             The consumer retrieves attributes from the stream and stores local
+	///             references to all attributes it wants to access.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="IncrementToken()"/> until it returns false and
+	///             consumes the attributes after each call.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="End()"/> so that any end-of-stream operations
+	///             can be performed.
+    ///         </li>
+	///         <li>
+    ///             The consumer calls <see cref="Close()"/> to release any resource when finished
+	///             using the <c>TokenStream</c>.
+    ///         </li>
+	///     </ol>
+    /// </remarks>
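+    /// <example>
+    ///     <para>
+    ///         A minimal consumer sketch of the workflow above. <c>GetTokenStream()</c> is a
+    ///         hypothetical factory standing in for any code that produces a <c>TokenStream</c>:
+    ///     </para>
+    ///     <code>
+    ///     TokenStream stream = GetTokenStream();
+    ///     // store a local reference to each attribute the consumer needs
+    ///     TermAttribute termAtt = (TermAttribute) stream.AddAttribute(typeof(TermAttribute));
+    ///     
+    ///     stream.Reset();
+    ///     while (stream.IncrementToken())
+    ///     {
+    ///         System.Console.WriteLine(termAtt.Term());
+    ///     }
+    ///     stream.End();
+    ///     stream.Close();
+    ///     </code>
+    /// </example>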
+	public abstract class TokenStream : AttributeSource
 	{
-		private void  InitBlock()
-		{
-			supportedMethods = GetSupportedMethods(this.GetType());
-		}
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private static readonly AttributeFactory DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY = new TokenWrapperAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        private static readonly AttributeFactory DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY = new TokenWrapperAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private TokenWrapper tokenWrapper;
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        private TokenWrapper tokenWrapper;
+
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private static bool onlyUseNewAPI = false;
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        private static bool onlyUseNewAPI = false;
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
-		private MethodSupport supportedMethods;
+        private MethodSupport supportedMethods;
+
+		private void  InitBlock()
+        {
+            // REMOVE: in 3.0
+            #pragma warning disable 618
+            supportedMethods = GetSupportedMethods(this.GetType());
+            #pragma warning restore 618
+		}
+		
+		
 		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+		// REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
 		private sealed class MethodSupport
 		{
@@ -141,9 +144,8 @@ namespace Lucene.Net.Analysis
 			private static readonly System.Type[] METHOD_NO_PARAMS = new System.Type[0];
 			private static readonly System.Type[] METHOD_TOKEN_PARAM = new System.Type[]{typeof(Token)};
 		}
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
         private static readonly Support.Dictionary<Type, MethodSupport> knownMethodSupport = new Support.Dictionary<Type, MethodSupport>();
 
@@ -159,8 +161,7 @@ namespace Lucene.Net.Analysis
          */
         // Aroush-2.9}}
 
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
 		private static MethodSupport GetSupportedMethods(System.Type clazz)
 		{
@@ -175,9 +176,8 @@ namespace Lucene.Net.Analysis
 			}
 			return supportedMethods;
 		}
-		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+
+        // REMOVE: in 3.0
         [Obsolete("Remove this when old API is removed! ")]
 		private sealed class TokenWrapperAttributeFactory:AttributeFactory
 		{
@@ -212,33 +212,47 @@ namespace Lucene.Net.Analysis
 				return delegate_Renamed.GetHashCode() ^ 0x0a45ff31;
 			}
 		}
-		
-		/// <summary> A TokenStream using the default attribute factory.</summary>
-		protected internal TokenStream():base(onlyUseNewAPI?AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY:TokenStream.DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY)
+
+        /// <summary> A <see cref="TokenStream"/> using the default attribute factory.</summary>
+        #pragma warning disable 618
+        protected internal TokenStream() : 
+            base( onlyUseNewAPI ? AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY : TokenStream.DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY)
 		{
 			InitBlock();
 			tokenWrapper = InitTokenWrapper(null);
 			Check();
 		}
-		
-		/// <summary> A TokenStream that uses the same attributes as the supplied one.</summary>
+        #pragma warning restore 618
+
+        /// <summary> A <see cref="TokenStream"/> that uses the same attributes as the supplied one.</summary>
 		protected internal TokenStream(AttributeSource input):base(input)
 		{
 			InitBlock();
-			tokenWrapper = InitTokenWrapper(input);
+            
+            // REMOVE: in 3.0
+            #pragma warning disable 618
+            tokenWrapper = InitTokenWrapper(input);
 			Check();
-		}
-		
-		/// <summary> A TokenStream using the supplied AttributeFactory for creating new {@link Attribute} instances.</summary>
-		protected internal TokenStream(AttributeFactory factory):base(onlyUseNewAPI?factory:new TokenWrapperAttributeFactory(factory))
+            #pragma warning restore 618
+        }
+
+        /// <summary> 
+        ///     A <see cref="TokenStream"/> using the supplied AttributeFactory for creating 
+        ///     new <see cref="IAttribute"/> instances.
+        /// </summary>
+        #pragma warning disable 618
+        protected internal TokenStream(AttributeFactory factory)
+            :base( onlyUseNewAPI? factory: new TokenWrapperAttributeFactory(factory))
 		{
 			InitBlock();
-			tokenWrapper = InitTokenWrapper(null);
+
+            // REMOVE: in 3.0
+            tokenWrapper = InitTokenWrapper(null);
 			Check();
-		}
+            #pragma warning restore 618
+        }
 		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
         [Obsolete("Remove this when old API is removed! ")]
 		private TokenWrapper InitTokenWrapper(AttributeSource input)
 		{
@@ -255,7 +269,7 @@ namespace Lucene.Net.Analysis
 					return ((TokenStream) input).tokenWrapper;
 				}
 				// check that all attributes are implemented by the same TokenWrapper instance
-				Attribute att = AddAttribute(typeof(TermAttribute));
+				IAttribute att = AddAttribute(typeof(TermAttribute));
 				if (att is TokenWrapper && AddAttribute(typeof(TypeAttribute)) == att && AddAttribute(typeof(PositionIncrementAttribute)) == att && AddAttribute(typeof(FlagsAttribute)) == att && AddAttribute(typeof(OffsetAttribute)) == att && AddAttribute(typeof(PayloadAttribute)) == att)
 				{
 					return (TokenWrapper) att;
@@ -267,8 +281,7 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		/// <deprecated> Remove this when old API is removed! 
-		/// </deprecated>
+	
         [Obsolete("Remove this when old API is removed! ")]
 		private void  Check()
 		{
@@ -284,81 +297,92 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		/// <summary> For extra performance you can globally enable the new
-		/// {@link #IncrementToken} API using {@link Attribute}s. There will be a
-		/// small, but in most cases negligible performance increase by enabling this,
-		/// but it only works if <b>all</b> <code>TokenStream</code>s use the new API and
-		/// implement {@link #IncrementToken}. This setting can only be enabled
-		/// globally.
-		/// <p/>
-		/// This setting only affects <code>TokenStream</code>s instantiated after this
-		/// call. All <code>TokenStream</code>s already created use the other setting.
-		/// <p/>
-		/// All core {@link Analyzer}s are compatible with this setting, if you have
-		/// your own <code>TokenStream</code>s that are also compatible, you should enable
-		/// this.
-		/// <p/>
-		/// When enabled, tokenization may throw {@link UnsupportedOperationException}
-		/// s, if the whole tokenizer chain is not compatible eg one of the
-		/// <code>TokenStream</code>s does not implement the new <code>TokenStream</code> API.
-		/// <p/>
-		/// The default is <code>false</code>, so there is the fallback to the old API
-		/// available.
-		/// 
-		/// </summary>
-		/// <deprecated> This setting will no longer be needed in Lucene 3.0 as the old
-		/// API will be removed.
-		/// </deprecated>
+		/// <summary> 
+        ///     <para>
+        ///         For extra performance you can globally enable the new
+		///         <see cref="IncrementToken()"/> API using <see cref="IAttribute"/>s. There will be a
+		///         small, but in most cases negligible performance increase by enabling this,
+		///         but it only works if <b>all</b> <c>TokenStream</c>s use the new API and
+		///         implement <see cref="IncrementToken()"/>. This setting can only be enabled
+		///         globally.
+        ///     </para>
+        /// </summary>
+        /// <remarks>
+		///     <para>
+        ///         This setting only affects <see cref="TokenStream"/>s instantiated after this
+		///         call. All <c>TokenStream</c>s already created use the other setting.
+        ///     </para>
+        ///     <para>
+    ///         All core <see cref="Lucene.Net.Analysis.Analyzer"/>s are compatible with this setting; if you have
+		///         your own <c>TokenStream</c>s that are also compatible, you should enable
+		///         this.
+        ///     </para>
+		///     <para>
+		///         When enabled, tokenization may throw <see cref="System.NotSupportedException"/>s
+		///         if the whole tokenizer chain is not compatible, e.g. if one of the
+		///         <c>TokenStream</c>s does not implement the new <c>TokenStream</c> API.
+        ///     </para>
+        ///     <para>
+		///         The default is <c>false</c>, so there is the fallback to the old API
+		///         available.
+        ///     </para>
+        /// </remarks>
+        /// <exception cref="System.NotSupportedException">When enabled, tokenization may throw this exception if the tokenizer chain is not fully compatible with the new API.</exception>
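+        /// <example>
+        ///     <para>
+        ///         A sketch of opting in at application startup, assuming every stream in the
+        ///         tokenizer chain implements <see cref="IncrementToken()"/>:
+        ///     </para>
+        ///     <code>
+        ///     TokenStream.SetOnlyUseNewAPI(true);
+        ///     </code>
+        /// </example>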
         [Obsolete("This setting will no longer be needed in Lucene 3.0 as the old API will be removed.")]
 		public static void  SetOnlyUseNewAPI(bool onlyUseNewAPI)
-		{
-			TokenStream.onlyUseNewAPI = onlyUseNewAPI;
-		}
+        {
+            #pragma warning disable 618
+            TokenStream.onlyUseNewAPI = onlyUseNewAPI;
+            #pragma warning restore 618
+        }
 		
-		/// <summary> Returns if only the new API is used.
-		/// 
+		/// <summary> 
+        ///     Returns <c>true</c> if only the new API is used, otherwise <c>false</c>.
 		/// </summary>
-		/// <seealso cref="setOnlyUseNewAPI">
-		/// </seealso>
-		/// <deprecated> This setting will no longer be needed in Lucene 3.0 as
-		/// the old API will be removed.
-		/// </deprecated>
         [Obsolete("This setting will no longer be needed in Lucene 3.0 as the old API will be removed.")]
 		public static bool GetOnlyUseNewAPI()
-		{
-			return onlyUseNewAPI;
-		}
-		
-		/// <summary> Consumers (i.e., {@link IndexWriter}) use this method to advance the stream to
-		/// the next token. Implementing classes must implement this method and update
-		/// the appropriate {@link AttributeImpl}s with the attributes of the next
-		/// token.
-		/// 
-		/// The producer must make no assumptions about the attributes after the
-		/// method has been returned: the caller may arbitrarily change it. If the
-		/// producer needs to preserve the state for subsequent calls, it can use
-		/// {@link #captureState} to create a copy of the current attribute state.
-		/// 
-		/// This method is called for every token of a document, so an efficient
-		/// implementation is crucial for good performance. To avoid calls to
-		/// {@link #AddAttribute(Class)} and {@link #GetAttribute(Class)} or downcasts,
-		/// references to all {@link AttributeImpl}s that this stream uses should be
-		/// retrieved during instantiation.
-		/// 
-		/// To ensure that filters and consumers know which attributes are available,
-		/// the attributes must be added during instantiation. Filters and consumers
-		/// are not required to check for availability of attributes in
-		/// {@link #IncrementToken()}.
-		/// 
-		/// </summary>
-		/// <returns> false for end of stream; true otherwise
-		/// 
-		/// Note that this method will be defined abstract in Lucene
-		/// 3.0.
-		/// </returns>
+        {
+            #pragma warning disable 618
+            return onlyUseNewAPI;
+            #pragma warning restore 618
+        }
+		
+        /// <summary> 
+        ///     Consumers, like <see cref="Lucene.Net.Index.IndexWriter"/>, use this 
+        ///     method to advance the stream to the next token. Implementing classes must 
+        ///     implement this method and update the appropriate <see cref="Lucene.Net.Util.AttributeImpl"/>s 
+        ///     with the attributes of the next token.
+        /// </summary>
+		/// <remarks>
+        ///     <para>
+		///         The producer must make no assumptions about the attributes after the
+		///         method has been returned: the caller may arbitrarily change it. If the
+		///         producer needs to preserve the state for subsequent calls, it can use
+		///         <see cref="AttributeSource.CaptureState()"/> to create a copy of the 
+        ///         current attribute state.
+        ///     </para>
+        ///     <para>
+		///         This method is called for every token of a document, so an efficient
+		///         implementation is crucial for good performance. To avoid calls to
+		///         <see cref="AttributeSource.AddAttribute(Type)"/> and <see cref="AttributeSource.GetAttribute(Type)"/> or downcasts,
+		///         references to all <see cref="AttributeImpl" />s that this stream uses should be
+		///         retrieved during instantiation.
+        ///     </para>
+        ///     <para>
+		///         To ensure that filters and consumers know which attributes are available,
+		///         the attributes must be added during instantiation. Filters and consumers
+		///         are not required to check for availability of attributes in
+		///         <see cref="IncrementToken()" />.
+        ///     </para>
+        /// </remarks>
+        /// <returns> <c>true</c> if the stream has <b>not</b> reached its end, otherwise <c>false</c>. </returns>
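+        /// <example>
+        ///     <para>
+        ///         A minimal sketch of a filter that lower-cases terms with
+        ///         <see cref="Lucene.Net.Analysis.Tokenattributes.TermAttribute"/>. The class is
+        ///         illustrative, not part of this code base; note that it caches its attribute
+        ///         reference during instantiation, as recommended above:
+        ///     </para>
+        ///     <code>
+        ///     public sealed class SketchLowerCaseFilter : TokenFilter
+        ///     {
+        ///         private readonly TermAttribute termAtt;
+        ///     
+        ///         public SketchLowerCaseFilter(TokenStream input) : base(input)
+        ///         {
+        ///             termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+        ///         }
+        ///     
+        ///         public override bool IncrementToken()
+        ///         {
+        ///             if (!input.IncrementToken())
+        ///                 return false;
+        ///     
+        ///             // mutate the shared attribute in place; no new Token objects are created
+        ///             char[] buffer = termAtt.TermBuffer();
+        ///             int length = termAtt.TermLength();
+        ///             for (int i = 0; i &lt; length; i++)
+        ///                 buffer[i] = System.Char.ToLower(buffer[i]);
+        ///     
+        ///             return true;
+        ///         }
+        ///     }
+        ///     </code>
+        /// </example>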
 		public virtual bool IncrementToken()
-		{
-			System.Diagnostics.Debug.Assert(tokenWrapper != null);
+        {
+            // CHANGE: IncrementToken becomes an empty abstract method in 3.0 
+            #pragma warning disable 618
+            System.Diagnostics.Debug.Assert(tokenWrapper != null);
 			
 			Token token;
 			if (supportedMethods.hasReusableNext)
@@ -370,61 +394,78 @@ namespace Lucene.Net.Analysis
 				System.Diagnostics.Debug.Assert(supportedMethods.hasNext);
 				token = Next();
 			}
-			if (token == null)
+			
+            if (token == null)
 				return false;
-			tokenWrapper.delegate_Renamed = token;
+			
+            tokenWrapper.delegate_Renamed = token;
 			return true;
-		}
-		
-		/// <summary> This method is called by the consumer after the last token has been
-		/// consumed, after {@link #IncrementToken()} returned <code>false</code>
-		/// (using the new <code>TokenStream</code> API). Streams implementing the old API
-		/// should upgrade to use this feature.
-		/// <p/>
-		/// This method can be used to perform any end-of-stream operations, such as
-		/// setting the final offset of a stream. The final offset of a stream might
-		/// differ from the offset of the last token eg in case one or more whitespaces
-		/// followed after the last token, but a {@link WhitespaceTokenizer} was used.
-		/// 
-		/// </summary>
-		/// <throws>  IOException </throws>
+            
+            #pragma warning restore 618
+        }
+		
+		/// <summary> 
+        ///     This method is called by the consumer after the last token has been
+		///     consumed, after <see cref="IncrementToken()" /> returned <c>false</c>
+		///     (using the new <c>TokenStream</c> API). Streams implementing the old API
+		///     should upgrade to use this feature.
+        /// </summary>
+		/// <remarks>
+        ///     <para>
+		///         This method can be used to perform any end-of-stream operations, like
+		///         setting the final offset of a stream. The final offset of a stream might
+		///         differ from the offset of the last token, e.g. in case one or more whitespaces
+		///         followed the last token and a <see cref="WhitespaceTokenizer"/> was used.
+        ///     </para>
+        /// </remarks>
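+        /// <example>
+        ///     <para>
+        ///         A sketch of an override in a hypothetical tokenizer that tracks a
+        ///         <c>finalOffset</c> field and a cached <c>offsetAtt</c> reference:
+        ///     </para>
+        ///     <code>
+        ///     public override void End()
+        ///     {
+        ///         // publish the final offset once the last token has been consumed
+        ///         offsetAtt.SetOffset(finalOffset, finalOffset);
+        ///     }
+        ///     </code>
+        /// </example>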
+        /// <exception cref="System.IO.IOException" />
 		public virtual void  End()
 		{
 			// do nothing by default
 		}
 		
-		/// <summary> Returns the next token in the stream, or null at EOS. When possible, the
-		/// input Token should be used as the returned Token (this gives fastest
-		/// tokenization performance), but this is not required and a new Token may be
-		/// returned. Callers may re-use a single Token instance for successive calls
-		/// to this method.
-		/// 
-		/// This implicitly defines a "contract" between consumers (callers of this
-		/// method) and producers (implementations of this method that are the source
-		/// for tokens):
-		/// <ul>
-		/// <li>A consumer must fully consume the previously returned {@link Token}
-		/// before calling this method again.</li>
-		/// <li>A producer must call {@link Token#Clear()} before setting the fields in
-		/// it and returning it</li>
-		/// </ul>
-		/// Also, the producer must make no assumptions about a {@link Token} after it
-		/// has been returned: the caller may arbitrarily change it. If the producer
-		/// needs to hold onto the {@link Token} for subsequent calls, it must clone()
-		/// it before storing it. Note that a {@link TokenFilter} is considered a
-		/// consumer.
-		/// 
-		/// </summary>
-		/// <param name="reusableToken">a {@link Token} that may or may not be used to return;
-		/// this parameter should never be null (the callee is not required to
-		/// check for null before using it, but it is a good idea to assert that
-		/// it is not null.)
+		/// <summary> 
+        ///     Returns the next token in the stream, or <c>null</c> at end-of-stream.
+        /// </summary>
+		/// <remarks>
+        ///     <para>
+        ///         The input Token should be used as the Token that is returned when possible, which will 
+        ///         give the fastest tokenization performance. However, this is not required. A new Token may be
+        ///         returned. Callers may re-use a single Token instance for successive calls
+        ///         to this method.
+        ///     </para>
+        ///     <para>
+		///         This implicitly defines a "contract" between consumers, the callers of this
+		///         method, and producers, the implementations of this method that are the source
+		///         for tokens:
+        ///     </para>
+		///     <ul>
+		///         <li>
+        ///             A consumer must fully consume the previously returned <see cref="Token" />
+		///             before calling this method again.
+        ///         </li>
+		///         <li>
+        ///             A producer must call <see cref="Token.Clear()"/> before setting the fields in
+		///             it and returning it.
+        ///         </li>
+		///     </ul>
+        ///     <para>
+		///         Also, the producer must make no assumptions about a <see cref="Token" /> after it
+		///         has been returned: the caller may arbitrarily change it. If the producer
+		///         needs to hold onto the <see cref="Token" /> for subsequent calls, it must clone()
+		///         it before storing it. Note that a <see cref="TokenFilter" /> is considered a
+		///         consumer.
+        ///     </para>
+        /// </remarks>
+		/// <param name="reusableToken">
+        ///     A <see cref="Token"/> that may or may not be used as the returned token;
+		///     this parameter should never be null. The callee is not required to
+		///     check for null before using it, but it is a good idea to assert that
+		///     it is not null.
 		/// </param>
-		/// <returns> next {@link Token} in the stream or null if end-of-stream was hit
+        /// <returns> 
+        ///     The next <see cref="Token"/> in the stream or <c>null</c> if the end-of-stream was hit.
 		/// </returns>
-		/// <deprecated> The new {@link #IncrementToken()} and {@link AttributeSource}
-		/// APIs should be used instead.
-		/// </deprecated>
         [Obsolete("The new IncrementToken() and AttributeSource APIs should be used instead.")]
 		public virtual Token Next(Token reusableToken)
 		{
@@ -445,17 +486,22 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		/// <summary> Returns the next {@link Token} in the stream, or null at EOS.
-		/// 
+		/// <summary> 
+        /// Returns the next <see cref="Token" /> in the stream, or <c>null</c> at end-of-stream.
 		/// </summary>
-		/// <deprecated> The returned Token is a "full private copy" (not re-used across
-		/// calls to {@link #Next()}) but will be slower than calling
-		/// {@link #Next(Token)} or using the new {@link #IncrementToken()}
-		/// method with the new {@link AttributeSource} API.
-		/// </deprecated>
+		/// <remarks>
+        ///     <para>
+        ///         The returned Token is a "full private copy" (not re-used across
+		///         calls to <see cref="Next()" />) but will be slower than calling
+		///         <see cref="Next(Token)" /> or using the new <see cref="IncrementToken()" />
+		///         method with the new <see cref="AttributeSource" /> API.
+        ///     </para>
+        /// </remarks>
         [Obsolete("The returned Token is a \"full private copy\" (not re-used across calls to Next()) but will be slower than calling {@link #Next(Token)} or using the new IncrementToken() method with the new AttributeSource API.")]
 		public virtual Token Next()
 		{
+            #pragma warning disable 618
+
 			if (tokenWrapper == null)
 				throw new System.NotSupportedException("This TokenStream only supports the new Attributes API.");
 			
@@ -481,18 +527,27 @@ namespace Lucene.Net.Analysis
 					nextToken.SetPayload((Lucene.Net.Index.Payload) p.Clone());
 				}
 			}
-			return nextToken;
+			
+            return nextToken;
+
+            #pragma warning restore 618
 		}
 		
-		/// <summary> Resets this stream to the beginning. This is an optional operation, so
-		/// subclasses may or may not implement this method. {@link #Reset()} is not needed for
-		/// the standard indexing process. However, if the tokens of a
-		/// <code>TokenStream</code> are intended to be consumed more than once, it is
-		/// necessary to implement {@link #Reset()}. Note that if your TokenStream
-		/// caches tokens and feeds them back again after a reset, it is imperative
-		/// that you clone the tokens when you store them away (on the first pass) as
-		/// well as when you return them (on future passes after {@link #Reset()}).
-		/// </summary>
+		/// <summary>
+        /// Resets this stream to the beginning. This is an optional operation, so
+		/// subclasses may or may not implement this method. <see cref="Reset()" /> is not needed for
+		/// the standard indexing process.
+        /// </summary>
+        /// <remarks>
+        ///     <para>
+        ///         However, if the tokens of a <c>TokenStream</c> are intended to be 
+        ///         consumed more than once, it is necessary to implement <see cref="Reset()" />. 
+        ///         Note that if your <c>TokenStream</c> caches tokens and feeds them back again
+        ///         after a reset, it is imperative that you clone the tokens when you 
+        ///         store them away on the first pass as well as when you return 
+        ///         them on future passes after <see cref="Reset()" />.
+        ///     </para>
+        /// </remarks>
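+        /// <example>
+        ///     <para>
+        ///         A sketch of consuming a hypothetical <c>stream</c> twice, which only works
+        ///         when the concrete stream implements <see cref="Reset()"/>:
+        ///     </para>
+        ///     <code>
+        ///     while (stream.IncrementToken()) { /* first pass */ }
+        ///     stream.Reset();
+        ///     while (stream.IncrementToken()) { /* second pass */ }
+        ///     </code>
+        /// </example>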
 		public virtual void  Reset()
 		{
 		}

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/FlagsAttribute.cs Sun Jul 24 01:21:27 2011
@@ -18,7 +18,7 @@
 using System;
 
 using Tokenizer = Lucene.Net.Analysis.Tokenizer;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/OffsetAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PayloadAttribute.cs Sun Jul 24 01:21:27 2011
@@ -18,7 +18,7 @@
 using System;
 
 using Payload = Lucene.Net.Index.Payload;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/PositionIncrementAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TermAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Analysis/Tokenattributes/TypeAttribute.cs Sun Jul 24 01:21:27 2011
@@ -17,7 +17,7 @@
 
 using System;
 
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 
 namespace Lucene.Net.Analysis.Tokenattributes
 {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Lucene.Net.csproj Sun Jul 24 01:21:27 2011
@@ -53,8 +53,7 @@
     <ConfigurationOverrideFile>
     </ConfigurationOverrideFile>
     <DefineConstants>TRACE;DEBUG</DefineConstants>
-    <DocumentationFile>
-    </DocumentationFile>
+    <DocumentationFile>..\..\bin\core\Debug\Lucene.Net.XML</DocumentationFile>
     <DebugSymbols>true</DebugSymbols>
     <FileAlignment>4096</FileAlignment>
     <NoStdLib>false</NoStdLib>

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Store/AlreadyClosedException.cs Sun Jul 24 01:21:27 2011
@@ -20,17 +20,71 @@ using System.Runtime.Serialization;
 
 namespace Lucene.Net.Store
 {
+
+    // JAVA: src/java/org/apache/lucene/store/AlreadyClosedException.java
 	
-	/// <summary> This exception is thrown when there is an attempt to
-	/// access something that has already been closed.
+	/// <summary> 
+    /// This exception is thrown when there is an attempt to access a resource 
+    /// that has already been closed.
 	/// </summary>
+    /// <remarks>
+    ///     <para>
+    ///         An example would be when a <see cref="Lucene.Net.Analysis.TokenStream"/> has already been closed. 
+    ///     </para>
+    /// </remarks>
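+    /// <example>
+    ///     <para>
+    ///         A minimal sketch of guarding a member against use after close; the
+    ///         <c>isClosed</c> field is hypothetical:
+    ///     </para>
+    ///     <code>
+    ///     if (isClosed)
+    ///         throw new AlreadyClosedException("this TokenStream is already closed");
+    ///     </code>
+    /// </example>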
 	[Serializable]
-	public class AlreadyClosedException:System.SystemException
+	public class AlreadyClosedException : System.SystemException
 	{
+        /// <summary>
+        /// Initializes a new instance of <see cref="AlreadyClosedException"/> with a message and <c>null</c> inner exception.
+        /// </summary>
+        /// <param name="message">
+        ///     A <c>String</c> that describes the error. The content of message is intended to be understood 
+        ///     by humans. The caller of this constructor is required to ensure that this string has been 
+        ///     localized for the current system culture. 
+        /// </param>
+        /// <remarks>
+        ///     <para>
+        ///         The constructor initializes the <see cref="System.Exception.Message"/> property of the new instance using message.
+        ///     </para>
+        /// </remarks>
 		public AlreadyClosedException(System.String message):base(message)
 		{
 		}
 
+        /// <summary>
+        /// Initializes a new instance of <see cref="AlreadyClosedException"/> with a message and inner exception.
+        /// </summary>
+        /// <param name="message">
+        ///     A <c>String</c> that describes the error. The content of message is intended to be understood 
+        ///     by humans. The caller of this constructor is required to ensure that this string has been 
+        ///     localized for the current system culture. 
+        /// </param>
+        /// <param name="innerException">
+        ///     The exception that is the cause of the current exception. If the <paramref name="innerException"/> parameter is not null, the 
+        ///     current exception is raised in a catch block that handles the inner exception. 
+        /// </param>
+        /// <remarks>
+        ///     <para>
+        ///         An exception that is thrown as a direct result of a previous exception should include a reference to the 
+        ///         previous exception in the <see cref="System.Exception.InnerException"/> property. The <see cref="System.Exception.InnerException"/> property 
+        ///         returns the same value that is passed into the constructor, or <c>null</c> if 
+        ///         the <see cref="System.Exception.InnerException"/> property does not supply the inner 
+        ///         exception value to the constructor.
+        ///     </para>
+        /// </remarks>
+        public AlreadyClosedException(string message, Exception innerException)
+            : base(message, innerException)
+        {
+
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="AlreadyClosedException"/> class with the specified serialization and context information.
+        /// </summary>
+        /// <param name="info">The data for serializing or deserializing the object. </param>
+        /// <param name="context">The source and destination for the object. </param>
+        // REFACTOR: add build conditional to only compile in the client and full versions of the .NET framework.
         protected AlreadyClosedException(SerializationInfo info, StreamingContext context) : base(info, context)
         {
         }

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/Attribute.cs Sun Jul 24 01:21:27 2011
@@ -19,9 +19,18 @@ using System;
 
 namespace Lucene.Net.Util
 {
+    // This class might be better off as a base attribute to simplify the way the code queries
+    // for types that implement this interface.  Look at the AttributeSource.cs class. If there
+    // is a good reason to keep this as-is, please note it in the comments and remove this one.
 	
-	/// <summary> Base interface for attributes.</summary>
-	public interface Attribute
+    // JAVA: src/java/org/apache/lucene/util/Attribute.java
+
+	/// <summary> 
+    /// The contract interface for attributes.
+    /// It is used to query for the types that implement this interface and to hold
+    /// references to instances of those types.
+    /// </summary>
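+    /// <example>
+    ///     <para>
+    ///         A sketch of declaring a custom attribute contract. <c>IPartOfSpeechAttribute</c>
+    ///         is hypothetical and would be backed by an <see cref="AttributeImpl"/> subclass:
+    ///     </para>
+    ///     <code>
+    ///     public interface IPartOfSpeechAttribute : IAttribute
+    ///     {
+    ///         string GetPartOfSpeech();
+    ///         void SetPartOfSpeech(string partOfSpeech);
+    ///     }
+    ///     </code>
+    /// </example>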
+	public interface IAttribute
 	{
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeImpl.cs Sun Jul 24 01:21:27 2011
@@ -27,7 +27,7 @@ namespace Lucene.Net.Util
 	/// of usually streamed objects, e. g. a {@link Lucene.Net.Analysis.TokenStream}.
 	/// </summary>
 	[Serializable]
-	public abstract class AttributeImpl : System.ICloneable, Attribute
+	public abstract class AttributeImpl : System.ICloneable, IAttribute
 	{
 		/// <summary> Clears the values in this AttributeImpl and resets it to its 
 		/// default value. If this implementation implements more than one Attribute interface

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Util/AttributeSource.cs Sun Jul 24 01:21:27 2011
@@ -302,7 +302,7 @@ namespace Lucene.Net.Util
                         for (int i = 0; i < interfaces.Length; i++)
                         {
                             System.Type curInterface = interfaces[i];
-                            if (curInterface != typeof(Attribute) && typeof(Attribute).IsAssignableFrom(curInterface))
+                            if (curInterface != typeof(IAttribute) && typeof(IAttribute).IsAssignableFrom(curInterface))
                             {
                                 foundInterfaces.Add(new WeakReference(curInterface));
                             }
@@ -338,11 +338,11 @@ namespace Lucene.Net.Util
         /// new instance is created, added to this AttributeSource and returned. 
         /// Signature for Java 1.5: <code>public &lt;T extends Attribute&gt; T addAttribute(Class&lt;T&gt;)</code>
         /// </summary>
-        public virtual Attribute AddAttribute(System.Type attClass)
+        public virtual IAttribute AddAttribute(System.Type attClass)
         {
             if (!attributes.ContainsKey(attClass))
             {
-                if (!(attClass.IsInterface && typeof(Attribute).IsAssignableFrom(attClass)))
+                if (!(attClass.IsInterface && typeof(IAttribute).IsAssignableFrom(attClass)))
                 {
                     throw new ArgumentException(
                         "AddAttribute() only accepts an interface that extends Attribute, but " +
@@ -387,7 +387,7 @@ namespace Lucene.Net.Util
         /// available. If you want to only use the attribute, if it is available (to optimize
         /// consuming), use {@link #hasAttribute}.
         /// </summary>
-        public virtual Attribute GetAttribute(System.Type attClass)
+        public virtual IAttribute GetAttribute(System.Type attClass)
         {
             if (!this.attributes.ContainsKey(attClass))
             {

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/BaseTokenStreamTestCase.cs Sun Jul 24 01:21:27 2011
@@ -25,12 +25,13 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Analysis
 {
 	
-	/// <summary> Base class for all Lucene unit tests that use TokenStreams.  
-	/// <p/>
-	/// This class runs all tests twice, one time with {@link TokenStream#setOnlyUseNewAPI} <code>false</code>
-	/// and after that one time with <code>true</code>.
+	/// <summary> Base class for all Lucene.Net unit tests that use TokenStreams.  
+	/// <para>
+	/// This class runs all tests twice, one time with <see cref="TokenStream.SetOnlyUseNewAPI" /> set to <c>false</c>
+	/// and one time with it set to <c>true</c>.
+    /// </para>
 	/// </summary>
-	public abstract class BaseTokenStreamTestCase:LuceneTestCase
+	public abstract class BaseTokenStreamTestCase : LuceneTestCase
 	{
 		
 		private bool onlyUseNewAPI = false;
@@ -38,12 +39,14 @@ namespace Lucene.Net.Analysis
 		
 		public BaseTokenStreamTestCase():base()
 		{
-			this.testWithNewAPI = null; // run all tests also with onlyUseNewAPI
+            // run all tests also with onlyUseNewAPI
+			this.testWithNewAPI = null; 
 		}
 		
 		public BaseTokenStreamTestCase(System.String name):base(name)
 		{
-			this.testWithNewAPI = null; // run all tests also with onlyUseNewAPI
+            // run all tests also with onlyUseNewAPI
+			this.testWithNewAPI = null; 
 		}
 		
 		public BaseTokenStreamTestCase(System.Collections.Hashtable testWithNewAPI):base()
@@ -61,23 +64,29 @@ namespace Lucene.Net.Analysis
 		public override void  SetUp()
 		{
 			base.SetUp();
-			TokenStream.SetOnlyUseNewAPI(onlyUseNewAPI);
-		}
+
+            // needed for this test.
+            #pragma warning disable 618
+                TokenStream.SetOnlyUseNewAPI(onlyUseNewAPI);
+            #pragma warning restore 618
+        }
 		
 		// @Override
 		public override void  RunBare()
 		{
-			// Do the test with onlyUseNewAPI=false (default)
+			// Test with onlyUseNewAPI=false (default)
 			try
 			{
 				onlyUseNewAPI = false;
-				// base.RunBare();  // {{Aroush-2.9}}
+				
+                // TODO: Document why the call below is commented out. 
+                // base.RunBare();  // {{Aroush-2.9}}
                 System.Diagnostics.Debug.Fail("Port issue:", "base.RunBare()"); // {{Aroush-2.9}}
 			}
-			catch (System.Exception e)
+			catch
 			{
-				System.Console.Out.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=false");
-				throw e;
+				Console.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=false");
+				throw;
 			}
 			
 			if (testWithNewAPI == null || testWithNewAPI.Contains(GetType()))
@@ -88,17 +97,17 @@ namespace Lucene.Net.Analysis
 					onlyUseNewAPI = true;
 					base.RunBare();
 				}
-				catch (System.Exception e)
+				catch
 				{
-					System.Console.Out.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=true");
-					throw e;
+					Console.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=true");
+                    throw;
 				}
 			}
 		}
 		
 		// some helpers to test Analyzers and TokenStreams:
 
-        public interface CheckClearAttributesAttribute : Lucene.Net.Util.Attribute
+        public interface CheckClearAttributesAttribute : Lucene.Net.Util.IAttribute
         {
                bool GetAndResetClearCalled();
         }
@@ -128,9 +137,14 @@ namespace Lucene.Net.Analysis
             //@Override
             public  override bool Equals(Object other) 
             {
+                if (other == null)
+                    throw new ArgumentNullException("other", "The argument 'other' must not be null.");
+
+                CheckClearAttributesAttributeImpl attributeImpl = other as CheckClearAttributesAttributeImpl;
+
                 return (
-                other is CheckClearAttributesAttributeImpl &&
-                ((CheckClearAttributesAttributeImpl) other).clearCalled == this.clearCalled
+                    attributeImpl != null &&
+                    attributeImpl.clearCalled == this.clearCalled
                 );
             }
 
@@ -165,6 +179,7 @@ namespace Lucene.Net.Analysis
             }
     
             TypeAttribute typeAtt = null;
+            
             if (types != null)
             {
                 Assert.IsTrue(ts.HasAttribute(typeof(TypeAttribute)), "has no TypeAttribute");
@@ -172,6 +187,7 @@ namespace Lucene.Net.Analysis
             }
             
             PositionIncrementAttribute posIncrAtt = null;
+
             if (posIncrements != null)
             {
                 Assert.IsTrue(ts.HasAttribute(typeof(PositionIncrementAttribute)), "has no PositionIncrementAttribute");
@@ -179,33 +195,49 @@ namespace Lucene.Net.Analysis
             }
 
             ts.Reset();
+            
             for (int i = 0; i < output.Length; i++)
             {
                 // extra safety to enforce, that the state is not preserved and also assign bogus values
                 ts.ClearAttributes();
                 termAtt.SetTermBuffer("bogusTerm");
-                if (offsetAtt != null) offsetAtt.SetOffset(14584724, 24683243);
-                if (typeAtt != null) typeAtt.SetType("bogusType");
-                if (posIncrAtt != null) posIncrAtt.SetPositionIncrement(45987657);
+                
+                if (offsetAtt != null) 
+                    offsetAtt.SetOffset(14584724, 24683243);
+                
+                if (typeAtt != null) 
+                    typeAtt.SetType("bogusType");
+                
+                if (posIncrAtt != null) 
+                    posIncrAtt.SetPositionIncrement(45987657);
 
                 checkClearAtt.GetAndResetClearCalled(); // reset it, because we called clearAttribute() before
+                
                 Assert.IsTrue(ts.IncrementToken(), "token " + i + " does not exist");
                 Assert.IsTrue(checkClearAtt.GetAndResetClearCalled(), "clearAttributes() was not called correctly in TokenStream chain");
 
                 Assert.AreEqual(output[i], termAtt.Term(), "term " + i);
+                
                 if (startOffsets != null)
                     Assert.AreEqual(startOffsets[i], offsetAtt.StartOffset(), "startOffset " + i);
+
                 if (endOffsets != null)
                     Assert.AreEqual(endOffsets[i], offsetAtt.EndOffset(), "endOffset " + i);
+                
                 if (types != null)
                     Assert.AreEqual(types[i], typeAtt.Type(), "type " + i);
+                
                 if (posIncrements != null)
                     Assert.AreEqual(posIncrements[i], posIncrAtt.GetPositionIncrement(), "posIncrement " + i);
             }
+            
             Assert.IsFalse(ts.IncrementToken(), "end of stream");
+
             ts.End();
+
             if (finalOffset.HasValue)
                 Assert.AreEqual(finalOffset, offsetAtt.EndOffset(), "finalOffset ");
+
             ts.Close();
         }
 

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestAnalyzers.cs Sun Jul 24 01:21:27 2011
@@ -24,160 +24,195 @@ using StandardTokenizer = Lucene.Net.Ana
 using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Payload = Lucene.Net.Index.Payload;
+using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis
 {
-	
-	[TestFixture]
-	public class TestAnalyzers:BaseTokenStreamTestCase
-	{
-		
-		/*public TestAnalyzers(System.String name):base(name)
-		{
-		}*/
-		
-		[Test]
-		public virtual void  TestSimple()
-		{
-			Analyzer a = new SimpleAnalyzer();
-			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "U.S.A.", new System.String[]{"u", "s", "a"});
-			AssertAnalyzesTo(a, "C++", new System.String[]{"c"});
-			AssertAnalyzesTo(a, "B2B", new System.String[]{"b", "b"});
-			AssertAnalyzesTo(a, "2B", new System.String[]{"b"});
-			AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[]{"quoted", "word"});
-		}
-		
-		[Test]
-		public virtual void  TestNull()
-		{
-			Analyzer a = new WhitespaceAnalyzer();
-			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "FOO", "BAR"});
-			AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[]{"foo", "bar", ".", "FOO", "<>", "BAR"});
-			AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[]{"foo.bar.FOO.BAR"});
-			AssertAnalyzesTo(a, "U.S.A.", new System.String[]{"U.S.A."});
-			AssertAnalyzesTo(a, "C++", new System.String[]{"C++"});
-			AssertAnalyzesTo(a, "B2B", new System.String[]{"B2B"});
-			AssertAnalyzesTo(a, "2B", new System.String[]{"2B"});
-			AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[]{"\"QUOTED\"", "word"});
-		}
-		
-		[Test]
-		public virtual void  TestStop()
-		{
-			Analyzer a = new StopAnalyzer();
-			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-			AssertAnalyzesTo(a, "foo a bar such FOO THESE BAR", new System.String[]{"foo", "bar", "foo", "bar"});
-		}
-		
-		internal virtual void  VerifyPayload(TokenStream ts)
-		{
-			PayloadAttribute payloadAtt = (PayloadAttribute) ts.GetAttribute(typeof(PayloadAttribute));
-			for (byte b = 1; ; b++)
-			{
-				bool hasNext = ts.IncrementToken();
-				if (!hasNext)
-					break;
-				// System.out.println("id="+System.identityHashCode(nextToken) + " " + t);
-				// System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
-				Assert.AreEqual(b, payloadAtt.GetPayload().ToByteArray()[0]);
-			}
-		}
-		
-		// Make sure old style next() calls result in a new copy of payloads
-		[Test]
-		public virtual void  TestPayloadCopy()
-		{
-			System.String s = "how now brown cow";
-			TokenStream ts;
-			ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
-			ts = new PayloadSetter(ts);
-			VerifyPayload(ts);
-			
-			ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
-			ts = new PayloadSetter(ts);
-			VerifyPayload(ts);
-		}
-		
-		// LUCENE-1150: Just a compile time test, to ensure the
-		// StandardAnalyzer constants remain publicly accessible
-		public virtual void  _testStandardConstants()
-		{
-			int x = StandardTokenizer.ALPHANUM;
-			x = StandardTokenizer.APOSTROPHE;
-			x = StandardTokenizer.ACRONYM;
-			x = StandardTokenizer.COMPANY;
-			x = StandardTokenizer.EMAIL;
-			x = StandardTokenizer.HOST;
-			x = StandardTokenizer.NUM;
-			x = StandardTokenizer.CJ;
-			System.String[] y = StandardTokenizer.TOKEN_TYPES;
-		}
-		
-		private class MyStandardAnalyzer:StandardAnalyzer
-		{
-			public override TokenStream TokenStream(System.String field, System.IO.TextReader reader)
-			{
-				return new WhitespaceAnalyzer().TokenStream(field, reader);
-			}
-		}
-		
-		[Test]
-		public virtual void  TestSubclassOverridingOnlyTokenStream()
-		{
-			Analyzer a = new MyStandardAnalyzer();
-			TokenStream ts = a.ReusableTokenStream("field", new System.IO.StringReader("the"));
-			// StandardAnalyzer will discard "the" (it's a
-			// stopword), by my subclass will not:
-			Assert.IsTrue(ts.IncrementToken());
-			Assert.IsFalse(ts.IncrementToken());
-		}
+
+    [TestFixture]
+    [Category(Categories.Unit)]
+    public class TestAnalyzers : BaseTokenStreamTestCase
+    {
+
+        /*public TestAnalyzers(System.String name):base(name)
+        {
+        }*/
+
+        [Test]
+        public virtual void TestSimple()
+        {
+            Analyzer a = new SimpleAnalyzer();
+            AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "U.S.A.", new System.String[] { "u", "s", "a" });
+            AssertAnalyzesTo(a, "C++", new System.String[] { "c" });
+            AssertAnalyzesTo(a, "B2B", new System.String[] { "b", "b" });
+            AssertAnalyzesTo(a, "2B", new System.String[] { "b" });
+            AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[] { "quoted", "word" });
+        }
+
+        [Test]
+        public virtual void TestNull()
+        {
+            Analyzer a = new WhitespaceAnalyzer();
+            AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[] { "foo", "bar", "FOO", "BAR" });
+            AssertAnalyzesTo(a, "foo      bar .  FOO <> BAR", new System.String[] { "foo", "bar", ".", "FOO", "<>", "BAR" });
+            AssertAnalyzesTo(a, "foo.bar.FOO.BAR", new System.String[] { "foo.bar.FOO.BAR" });
+            AssertAnalyzesTo(a, "U.S.A.", new System.String[] { "U.S.A." });
+            AssertAnalyzesTo(a, "C++", new System.String[] { "C++" });
+            AssertAnalyzesTo(a, "B2B", new System.String[] { "B2B" });
+            AssertAnalyzesTo(a, "2B", new System.String[] { "2B" });
+            AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[] { "\"QUOTED\"", "word" });
+        }
+
+        [Test]
+        public virtual void TestStop()
+        {
+            Analyzer a = new StopAnalyzer(_TestUtil.CurrentVersion);
+            AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+            AssertAnalyzesTo(a, "foo a bar such FOO THESE BAR", new System.String[] { "foo", "bar", "foo", "bar" });
+        }
+
+        // Make sure old style next() calls result in a new copy of payloads
+        [Test]
+        public virtual void TestPayloadCopy()
+        {
+            System.String s = "how now brown cow";
+            TokenStream ts;
+
+            ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
+            ts = new PayloadSetter(ts);
+            VerifyPayload(ts);
+
+            ts = new WhitespaceTokenizer(new System.IO.StringReader(s));
+            ts = new PayloadSetter(ts);
+
+            VerifyPayload(ts);
+        }
+
+        // LUCENE-1150: originally just a compile-time test; now also asserts that the
+        // StandardTokenizer constants remain publicly accessible and keep their values
+        [Test]
+        public virtual void StandardConstants()
+        {
+            Assert.AreEqual(0, StandardTokenizer.ALPHANUM);
+            Assert.AreEqual(1, StandardTokenizer.APOSTROPHE);
+            Assert.AreEqual(2, StandardTokenizer.ACRONYM);
+            Assert.AreEqual(3, StandardTokenizer.COMPANY);
+            Assert.AreEqual(4, StandardTokenizer.EMAIL);
+            Assert.AreEqual(5, StandardTokenizer.HOST);
+            Assert.AreEqual(6, StandardTokenizer.NUM);
+            Assert.AreEqual(7, StandardTokenizer.CJ);
+            
+            string[] tokenTypes = new string[]{
+                "<ALPHANUM>", 
+                "<APOSTROPHE>", 
+                "<ACRONYM>", 
+                "<COMPANY>", 
+                "<EMAIL>", 
+                "<HOST>", 
+                "<NUM>", 
+                "<CJ>", 
+                "<ACRONYM_DEP>"
+            };
+
+            Assert.AreEqual(tokenTypes, StandardTokenizer.TOKEN_TYPES);
+        }
+
+        [Test]
+        public virtual void TestSubclassOverridingOnlyTokenStream()
+        {
+            Analyzer a = new MyStandardAnalyzer();
+            TokenStream ts = a.ReusableTokenStream("field", new System.IO.StringReader("the"));
+            
+            // StandardAnalyzer will discard "the" (it's a
+            // stopword), but my subclass will not:
+            Assert.IsTrue(ts.IncrementToken());
+            Assert.IsFalse(ts.IncrementToken());
+        }
 
         [Test]
         public void Test_LUCENE_3042_LUCENENET_433()
         {
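+            // Consume the stream fully, call End() and Close(), then verify the
+            // analyzer can still be reused on the same input.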
             String testString = "t";
 
-            Analyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer();
+            Analyzer analyzer = new StandardAnalyzer(_TestUtil.CurrentVersion);
+
             TokenStream stream = analyzer.ReusableTokenStream("dummy", new System.IO.StringReader(testString));
             stream.Reset();
+            
             while (stream.IncrementToken())
             {
                 // consume
             }
+
             stream.End();
             stream.Close();
 
             AssertAnalyzesToReuse(analyzer, testString, new String[] { "t" });
         }
-	}
-	
-	class PayloadSetter:TokenFilter
-	{
-		private void  InitBlock()
-		{
-			p = new Payload(data, 0, 1);
-		}
-		internal PayloadAttribute payloadAtt;
-		public PayloadSetter(TokenStream input):base(input)
-		{
-			InitBlock();
-			payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
-		}
-		
-		internal byte[] data = new byte[1];
-		internal Payload p;
-		
-		public override bool IncrementToken()
-		{
-			bool hasNext = input.IncrementToken();
-			if (!hasNext)
-				return false;
-			payloadAtt.SetPayload(p); // reuse the payload / byte[]
-			data[0]++;
-			return true;
-		}
-	}
+
+        #region helpers
+
+        internal virtual void VerifyPayload(TokenStream ts)
+        {
+            PayloadAttribute payloadAtt = (PayloadAttribute)ts.GetAttribute(typeof(PayloadAttribute));
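+            // PayloadSetter bumps its shared byte after setting each payload, so
+            // the values should read back as 1, 2, 3, ... in token order.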
+            for (byte b = 1; ; b++)
+            {
+                bool hasNext = ts.IncrementToken();
+                if (!hasNext)
+                    break;
+                // System.out.println("id="+System.identityHashCode(nextToken) + " " + t);
+                // System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
+                Assert.AreEqual(b, payloadAtt.GetPayload().ToByteArray()[0]);
+            }
+        }
+
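+        // Overrides only TokenStream (not ReusableTokenStream), delegating to a
+        // WhitespaceAnalyzer so StandardAnalyzer's stopword filtering is skipped.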
+        class MyStandardAnalyzer : StandardAnalyzer
+        {
+            public MyStandardAnalyzer()
+                : base(_TestUtil.CurrentVersion)
+            {
+
+            }
+
+            public override TokenStream TokenStream(System.String field, System.IO.TextReader reader)
+            {
+                return new WhitespaceAnalyzer().TokenStream(field, reader);
+            }
+        }
+
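+        // Attaches a payload to every token, reusing one shared byte[] and
+        // incrementing its value after each token is produced.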
+        class PayloadSetter : TokenFilter
+        {
+            private void InitBlock()
+            {
+                p = new Payload(data, 0, 1);
+            }
+            internal PayloadAttribute payloadAtt;
+            public PayloadSetter(TokenStream input)
+                : base(input)
+            {
+                InitBlock();
+                payloadAtt = (PayloadAttribute)AddAttribute(typeof(PayloadAttribute));
+            }
+
+            internal byte[] data = new byte[1];
+            internal Payload p;
+
+            public override bool IncrementToken()
+            {
+                bool hasNext = input.IncrementToken();
+                if (!hasNext)
+                    return false;
+                payloadAtt.SetPayload(p); // reuse the payload / byte[]
+                data[0]++;
+                return true;
+            }
+        }
+
+        #endregion
+    }
 }
\ No newline at end of file

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs?rev=1150245&r1=1150244&r2=1150245&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Analysis/TestTokenStreamBWComp.cs Sun Jul 24 01:21:27 2011
@@ -21,7 +21,7 @@ using NUnit.Framework;
 
 using Lucene.Net.Analysis.Tokenattributes;
 using Payload = Lucene.Net.Index.Payload;
-using Attribute = Lucene.Net.Util.Attribute;
+using Attribute = Lucene.Net.Util.IAttribute;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;