Posted to commits@lucenenet.apache.org by cc...@apache.org on 2012/03/21 07:04:35 UTC

svn commit: r1303294 [1/6] - in /incubator/lucene.net/trunk: src/contrib/Analyzers/AR/ src/contrib/Analyzers/BR/ src/contrib/Analyzers/CJK/ src/contrib/Analyzers/Cn/ src/contrib/Analyzers/Compound/ src/contrib/Analyzers/Cz/ src/contrib/Analyzers/De/ sr...

Author: ccurrens
Date: Wed Mar 21 06:04:26 2012
New Revision: 1303294

URL: http://svn.apache.org/viewvc?rev=1303294&view=rev
Log:
[LUCENENET-467] - Additional work on properties: added some and reverted others. Still need to review the remaining members to confirm they represent data (and therefore belong as properties).
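
Most of the hunks below apply one mechanical pattern: Java-style accessor calls such as GetPreviousTokenStream()/SetPreviousTokenStream(...), Type()/SetType(...), and StartOffset()/EndOffset() are replaced with the corresponding .NET properties (PreviousTokenStream, Type, StartOffset, EndOffset). As a rough illustration only, not part of the commit, the C# sketch below shows the token-stream reuse idiom after the change; the analyzer class name and the filter chain are made up for the example, while the PreviousTokenStream property and Tokenizer.Reset usage mirror what the diffs themselves show.

    using System.IO;
    using Lucene.Net.Analysis;

    // Illustrative analyzer (hypothetical name) using the property-based
    // reuse API introduced by this commit.
    public sealed class SimpleReusableAnalyzer : Analyzer
    {
        private class SavedStreams
        {
            public Tokenizer Source;
            public TokenStream Result;
        }

        public override TokenStream TokenStream(string fieldName, TextReader reader)
        {
            return new LowerCaseFilter(new WhitespaceTokenizer(reader));
        }

        public override TokenStream ReusableTokenStream(string fieldName, TextReader reader)
        {
            // Before this commit: (SavedStreams)GetPreviousTokenStream()
            SavedStreams streams = (SavedStreams)PreviousTokenStream;
            if (streams == null)
            {
                streams = new SavedStreams();
                streams.Source = new WhitespaceTokenizer(reader);
                streams.Result = new LowerCaseFilter(streams.Source);
                // Before this commit: SetPreviousTokenStream(streams)
                PreviousTokenStream = streams;
            }
            else
            {
                // Reuse the saved tokenizer/filter chain for the new reader.
                streams.Source.Reset(reader);
            }
            return streams.Result;
        }
    }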

Modified:
    incubator/lucene.net/trunk/src/contrib/Analyzers/AR/ArabicAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/BR/BrazilianAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKTokenizer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Cn/ChineseAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Compound/CompoundWordTokenFilterBase.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Cz/CzechAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/De/GermanAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/El/GreekAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Fa/PersianAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Fr/FrenchAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAwareTokenStream.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/EdgeNGramTokenFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/NGramTokenFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Nl/DutchAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/NumericPayloadTokenFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TokenOffsetPayloadTokenFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TypeAsPayloadTokenFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Query/QueryAutoStopWordAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Ru/RussianAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleAnalyzerWrapper.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleMatrixFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Sinks/TokenTypeSinkFilter.cs
    incubator/lucene.net/trunk/src/contrib/Analyzers/Th/ThaiAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/Core/Index/TermVectorEnumerator.cs
    incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/FieldTermStack.cs
    incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/VectorHighlightMapper.cs
    incubator/lucene.net/trunk/src/contrib/Highlighter/QueryTermExtractor.cs
    incubator/lucene.net/trunk/src/contrib/Highlighter/SimpleFragmenter.cs
    incubator/lucene.net/trunk/src/contrib/Highlighter/TokenGroup.cs
    incubator/lucene.net/trunk/src/contrib/Highlighter/TokenSources.cs
    incubator/lucene.net/trunk/src/contrib/Queries/FuzzyLikeThisQuery.cs
    incubator/lucene.net/trunk/src/contrib/Queries/Similar/MoreLikeThis.cs
    incubator/lucene.net/trunk/src/contrib/Similarity/Similar/MoreLikeThis.cs
    incubator/lucene.net/trunk/src/contrib/Snowball/Lucene.Net/Analysis/Snowball/SnowballAnalyzer.cs
    incubator/lucene.net/trunk/src/contrib/SpellChecker/Spell/SpellChecker.cs
    incubator/lucene.net/trunk/src/core/Analysis/Analyzer.cs
    incubator/lucene.net/trunk/src/core/Analysis/KeywordAnalyzer.cs
    incubator/lucene.net/trunk/src/core/Analysis/NumericTokenStream.cs
    incubator/lucene.net/trunk/src/core/Analysis/PerFieldAnalyzerWrapper.cs
    incubator/lucene.net/trunk/src/core/Analysis/PorterStemFilter.cs
    incubator/lucene.net/trunk/src/core/Analysis/PorterStemmer.cs
    incubator/lucene.net/trunk/src/core/Analysis/SimpleAnalyzer.cs
    incubator/lucene.net/trunk/src/core/Analysis/Standard/StandardAnalyzer.cs
    incubator/lucene.net/trunk/src/core/Analysis/Standard/StandardFilter.cs
    incubator/lucene.net/trunk/src/core/Analysis/Standard/StandardTokenizer.cs
    incubator/lucene.net/trunk/src/core/Analysis/StopAnalyzer.cs
    incubator/lucene.net/trunk/src/core/Analysis/TeeSinkTokenFilter.cs
    incubator/lucene.net/trunk/src/core/Analysis/Token.cs
    incubator/lucene.net/trunk/src/core/Analysis/Tokenattributes/OffsetAttribute.cs
    incubator/lucene.net/trunk/src/core/Analysis/Tokenattributes/OffsetAttributeImpl.cs
    incubator/lucene.net/trunk/src/core/Analysis/Tokenattributes/TypeAttribute.cs
    incubator/lucene.net/trunk/src/core/Analysis/Tokenattributes/TypeAttributeImpl.cs
    incubator/lucene.net/trunk/src/core/Analysis/WhitespaceAnalyzer.cs
    incubator/lucene.net/trunk/src/core/Document/AbstractField.cs
    incubator/lucene.net/trunk/src/core/Document/Document.cs
    incubator/lucene.net/trunk/src/core/Document/Fieldable.cs
    incubator/lucene.net/trunk/src/core/Index/AbstractAllTermDocs.cs
    incubator/lucene.net/trunk/src/core/Index/CheckIndex.cs
    incubator/lucene.net/trunk/src/core/Index/CompoundFileReader.cs
    incubator/lucene.net/trunk/src/core/Index/CompoundFileWriter.cs
    incubator/lucene.net/trunk/src/core/Index/ConcurrentMergeScheduler.cs
    incubator/lucene.net/trunk/src/core/Index/DirectoryReader.cs
    incubator/lucene.net/trunk/src/core/Index/DocInverterPerField.cs
    incubator/lucene.net/trunk/src/core/Index/DocumentsWriter.cs
    incubator/lucene.net/trunk/src/core/Index/FieldsWriter.cs
    incubator/lucene.net/trunk/src/core/Index/FilterIndexReader.cs
    incubator/lucene.net/trunk/src/core/Index/IndexReader.cs
    incubator/lucene.net/trunk/src/core/Index/IndexWriter.cs
    incubator/lucene.net/trunk/src/core/Index/LogMergePolicy.cs
    incubator/lucene.net/trunk/src/core/Index/MultiReader.cs
    incubator/lucene.net/trunk/src/core/Index/ParallelReader.cs
    incubator/lucene.net/trunk/src/core/Index/PositionBasedTermVectorMapper.cs
    incubator/lucene.net/trunk/src/core/Index/SegmentInfo.cs
    incubator/lucene.net/trunk/src/core/Index/SegmentInfos.cs
    incubator/lucene.net/trunk/src/core/Index/SegmentMerger.cs
    incubator/lucene.net/trunk/src/core/Index/SegmentReader.cs
    incubator/lucene.net/trunk/src/core/Index/SegmentTermVector.cs
    incubator/lucene.net/trunk/src/core/Index/SortedTermVectorMapper.cs
    incubator/lucene.net/trunk/src/core/Index/TermFreqVector.cs
    incubator/lucene.net/trunk/src/core/Index/TermPositionVector.cs
    incubator/lucene.net/trunk/src/core/Index/TermVectorEntry.cs
    incubator/lucene.net/trunk/src/core/Index/TermVectorMapper.cs
    incubator/lucene.net/trunk/src/core/Index/TermVectorsReader.cs
    incubator/lucene.net/trunk/src/core/Index/TermVectorsTermsWriterPerField.cs
    incubator/lucene.net/trunk/src/core/Index/TermVectorsWriter.cs
    incubator/lucene.net/trunk/src/core/Messages/Message.cs
    incubator/lucene.net/trunk/src/core/Messages/MessageImpl.cs
    incubator/lucene.net/trunk/src/core/QueryParser/CharStream.cs
    incubator/lucene.net/trunk/src/core/QueryParser/FastCharStream.cs
    incubator/lucene.net/trunk/src/core/QueryParser/QueryParser.cs
    incubator/lucene.net/trunk/src/core/QueryParser/QueryParserTokenManager.cs
    incubator/lucene.net/trunk/src/core/Search/BooleanClause.cs
    incubator/lucene.net/trunk/src/core/Search/BooleanQuery.cs
    incubator/lucene.net/trunk/src/core/Search/BooleanScorer.cs
    incubator/lucene.net/trunk/src/core/Search/Collector.cs
    incubator/lucene.net/trunk/src/core/Search/ConstantScoreQuery.cs
    incubator/lucene.net/trunk/src/core/Search/DisjunctionMaxQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Explanation.cs
    incubator/lucene.net/trunk/src/core/Search/FieldComparator.cs
    incubator/lucene.net/trunk/src/core/Search/FilterManager.cs
    incubator/lucene.net/trunk/src/core/Search/FilteredQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Function/CustomScoreQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Function/ValueSourceQuery.cs
    incubator/lucene.net/trunk/src/core/Search/FuzzyQuery.cs
    incubator/lucene.net/trunk/src/core/Search/IndexSearcher.cs
    incubator/lucene.net/trunk/src/core/Search/MatchAllDocsQuery.cs
    incubator/lucene.net/trunk/src/core/Search/MultiPhraseQuery.cs
    incubator/lucene.net/trunk/src/core/Search/MultiSearcher.cs
    incubator/lucene.net/trunk/src/core/Search/MultiTermQuery.cs
    incubator/lucene.net/trunk/src/core/Search/ParallelMultiSearcher.cs
    incubator/lucene.net/trunk/src/core/Search/Payloads/PayloadNearQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Payloads/PayloadSpanUtil.cs
    incubator/lucene.net/trunk/src/core/Search/Payloads/PayloadTermQuery.cs
    incubator/lucene.net/trunk/src/core/Search/PhraseQuery.cs
    incubator/lucene.net/trunk/src/core/Search/PositiveScoresOnlyCollector.cs
    incubator/lucene.net/trunk/src/core/Search/Query.cs
    incubator/lucene.net/trunk/src/core/Search/QueryTermVector.cs
    incubator/lucene.net/trunk/src/core/Search/Similarity.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/NearSpansOrdered.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/NearSpansUnordered.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/SpanFirstQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/SpanNotQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/SpanOrQuery.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/SpanWeight.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/Spans.cs
    incubator/lucene.net/trunk/src/core/Search/Spans/TermSpans.cs
    incubator/lucene.net/trunk/src/core/Search/TermQuery.cs
    incubator/lucene.net/trunk/src/core/Search/TermScorer.cs
    incubator/lucene.net/trunk/src/core/Search/TimeLimitingCollector.cs
    incubator/lucene.net/trunk/src/core/Search/TopDocs.cs
    incubator/lucene.net/trunk/src/core/Search/TopFieldCollector.cs
    incubator/lucene.net/trunk/src/core/Search/TopScoreDocCollector.cs
    incubator/lucene.net/trunk/src/core/Search/Weight.cs
    incubator/lucene.net/trunk/src/core/Search/WildcardQuery.cs
    incubator/lucene.net/trunk/src/core/Store/BufferedIndexInput.cs
    incubator/lucene.net/trunk/src/core/Store/Directory.cs
    incubator/lucene.net/trunk/src/core/Store/FSDirectory.cs
    incubator/lucene.net/trunk/src/core/Util/AttributeSource.cs
    incubator/lucene.net/trunk/src/core/Util/FieldCacheSanityChecker.cs
    incubator/lucene.net/trunk/src/core/Util/OpenBitSet.cs
    incubator/lucene.net/trunk/src/core/Util/ReaderUtil.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Cn/TestChineseTokenizer.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Payloads/NumericPayloadTokenFilterTest.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Payloads/TokenOffsetPayloadTokenFilterTest.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Payloads/TypeAsPayloadTokenFilterTest.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Shingle/ShingleFilterTest.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Shingle/TestShingleMatrixFilter.cs
    incubator/lucene.net/trunk/test/contrib/Analyzers/Sinks/TokenTypeSinkTokenizerTest.cs
    incubator/lucene.net/trunk/test/contrib/Core/Index/SegmentsGenCommitTest.cs
    incubator/lucene.net/trunk/test/contrib/Core/Index/TermVectorEnumeratorTest.cs
    incubator/lucene.net/trunk/test/contrib/Core/Util/Cache/SegmentCacheTest.cs
    incubator/lucene.net/trunk/test/contrib/Snowball/Analysis/Snowball/TestSnowball.cs
    incubator/lucene.net/trunk/test/contrib/SpellChecker/Test/TestSpellChecker.cs
    incubator/lucene.net/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs
    incubator/lucene.net/trunk/test/core/Analysis/TestKeywordAnalyzer.cs
    incubator/lucene.net/trunk/test/core/Analysis/TestNumericTokenStream.cs
    incubator/lucene.net/trunk/test/core/Analysis/TestToken.cs
    incubator/lucene.net/trunk/test/core/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs
    incubator/lucene.net/trunk/test/core/Document/TestBinaryDocument.cs
    incubator/lucene.net/trunk/test/core/Index/TestAddIndexesNoOptimize.cs
    incubator/lucene.net/trunk/test/core/Index/TestAtomicUpdate.cs
    incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs
    incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs
    incubator/lucene.net/trunk/test/core/Index/TestCrash.cs
    incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs
    incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestDoc.cs
    incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs
    incubator/lucene.net/trunk/test/core/Index/TestFieldsReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestFilterIndexReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexReaderClone.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexReaderCloneNorms.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexReaderReopen.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs
    incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs
    incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs
    incubator/lucene.net/trunk/test/core/Index/TestNorms.cs
    incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs
    incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestRollback.cs
    incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs
    incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs
    incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs
    incubator/lucene.net/trunk/test/core/Index/TestTermdocPerf.cs
    incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs
    incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs
    incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs
    incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs
    incubator/lucene.net/trunk/test/core/QueryParser/TestQueryParser.cs
    incubator/lucene.net/trunk/test/core/Search/CheckHits.cs
    incubator/lucene.net/trunk/test/core/Search/Function/TestCustomScoreQuery.cs
    incubator/lucene.net/trunk/test/core/Search/Function/TestFieldScoreQuery.cs
    incubator/lucene.net/trunk/test/core/Search/Function/TestOrdValues.cs
    incubator/lucene.net/trunk/test/core/Search/JustCompileSearch.cs
    incubator/lucene.net/trunk/test/core/Search/QueryUtils.cs
    incubator/lucene.net/trunk/test/core/Search/Spans/JustCompileSearchSpans.cs
    incubator/lucene.net/trunk/test/core/Search/Spans/TestBasics.cs
    incubator/lucene.net/trunk/test/core/Search/Spans/TestNearSpansOrdered.cs
    incubator/lucene.net/trunk/test/core/Search/Spans/TestPayloadSpans.cs
    incubator/lucene.net/trunk/test/core/Search/Spans/TestSpansAdvanced.cs
    incubator/lucene.net/trunk/test/core/Search/Spans/TestSpansAdvanced2.cs
    incubator/lucene.net/trunk/test/core/Search/TestBoolean2.cs
    incubator/lucene.net/trunk/test/core/Search/TestBooleanQuery.cs
    incubator/lucene.net/trunk/test/core/Search/TestDocBoost.cs
    incubator/lucene.net/trunk/test/core/Search/TestElevationComparator.cs
    incubator/lucene.net/trunk/test/core/Search/TestFieldCacheRangeFilter.cs
    incubator/lucene.net/trunk/test/core/Search/TestFieldCacheTermsFilter.cs
    incubator/lucene.net/trunk/test/core/Search/TestFuzzyQuery.cs
    incubator/lucene.net/trunk/test/core/Search/TestMultiTermConstantScore.cs
    incubator/lucene.net/trunk/test/core/Search/TestMultiThreadTermVectors.cs
    incubator/lucene.net/trunk/test/core/Search/TestNumericRangeQuery32.cs
    incubator/lucene.net/trunk/test/core/Search/TestNumericRangeQuery64.cs
    incubator/lucene.net/trunk/test/core/Search/TestPositionIncrement.cs
    incubator/lucene.net/trunk/test/core/Search/TestPositiveScoresOnlyCollector.cs
    incubator/lucene.net/trunk/test/core/Search/TestScoreCachingWrappingScorer.cs
    incubator/lucene.net/trunk/test/core/Search/TestScorerPerf.cs
    incubator/lucene.net/trunk/test/core/Search/TestSetNorm.cs
    incubator/lucene.net/trunk/test/core/Search/TestSimilarity.cs
    incubator/lucene.net/trunk/test/core/Search/TestSort.cs
    incubator/lucene.net/trunk/test/core/Search/TestTermRangeFilter.cs
    incubator/lucene.net/trunk/test/core/Search/TestTermRangeQuery.cs
    incubator/lucene.net/trunk/test/core/Search/TestTermScorer.cs
    incubator/lucene.net/trunk/test/core/Search/TestTermVectors.cs
    incubator/lucene.net/trunk/test/core/Search/TestTimeLimitingCollector.cs
    incubator/lucene.net/trunk/test/core/Search/TestTopDocsCollector.cs
    incubator/lucene.net/trunk/test/core/Search/TestTopScoreDocCollector.cs
    incubator/lucene.net/trunk/test/core/Store/TestRAMDirectory.cs
    incubator/lucene.net/trunk/test/core/Support/TestOldPatches.cs
    incubator/lucene.net/trunk/test/core/Util/TestAttributeSource.cs

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/AR/ArabicAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/AR/ArabicAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/AR/ArabicAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/AR/ArabicAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -183,7 +183,7 @@ namespace Lucene.Net.Analysis.AR
          */
         public override TokenStream ReusableTokenStream(string fieldName, TextReader reader)
         {
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -194,7 +194,7 @@ namespace Lucene.Net.Analysis.AR
                                                 streams.Result, stoptable);
                 streams.Result = new ArabicNormalizationFilter(streams.Result);
                 streams.Result = new ArabicStemFilter(streams.Result);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/BR/BrazilianAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/BR/BrazilianAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/BR/BrazilianAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/BR/BrazilianAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -167,7 +167,7 @@ namespace Lucene.Net.Analysis.BR
         public void SetStemExclusionTable(params string[] exclusionlist)
         {
             excltable = StopFilter.MakeStopSet(exclusionlist);
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -178,7 +178,7 @@ namespace Lucene.Net.Analysis.BR
         public void SetStemExclusionTable(IDictionary<string, string> exclusionlist)
         {
             excltable = new HashSet<string>(exclusionlist.Keys);
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -189,7 +189,7 @@ namespace Lucene.Net.Analysis.BR
         public void SetStemExclusionTable(FileInfo exclusionlist)
         {
             excltable = WordlistLoader.GetWordSet(exclusionlist);
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -227,7 +227,7 @@ namespace Lucene.Net.Analysis.BR
 
         public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
-            SavedStreams streams = (SavedStreams) GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams) PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -237,7 +237,7 @@ namespace Lucene.Net.Analysis.BR
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.result, stoptable);
                 streams.result = new BrazilianStemFilter(streams.result, excltable);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -135,14 +135,14 @@ namespace Lucene.Net.Analysis.CJK
         public override sealed TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
             /* tokenStream() is final, no back compat issue */
-            SavedStreams streams = (SavedStreams) GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams) PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
                 streams.source = new CJKTokenizer(reader);
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.source, stopTable);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKTokenizer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKTokenizer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKTokenizer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/CJK/CJKTokenizer.cs Wed Mar 21 06:04:26 2012
@@ -361,7 +361,7 @@ namespace Lucene.Net.Analysis.CJK
                 {
                     termAtt.SetTermBuffer(buffer, 0, length);
                     offsetAtt.SetOffset(CorrectOffset(start), CorrectOffset(start + length));
-                    typeAtt.SetType(TOKEN_TYPE_NAMES[tokenType]);
+                    typeAtt.Type = TOKEN_TYPE_NAMES[tokenType];
                     return true;
                 }
                 else if (dataLen == 0)

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Cn/ChineseAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Cn/ChineseAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Cn/ChineseAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Cn/ChineseAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -67,13 +67,13 @@ namespace Lucene.Net.Analysis.Cn
         public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
             /* tokenStream() is final, no back compat issue */
-            SavedStreams streams = (SavedStreams) GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams) PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
                 streams.source = new ChineseTokenizer(reader);
                 streams.result = new ChineseFilter(streams.source);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Compound/CompoundWordTokenFilterBase.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Compound/CompoundWordTokenFilterBase.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Compound/CompoundWordTokenFilterBase.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Compound/CompoundWordTokenFilterBase.cs Wed Mar 21 06:04:26 2012
@@ -136,8 +136,8 @@ namespace Lucene.Net.Analysis.Compound
             ClearAttributes();
             termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
             flagsAtt.Flags = token.Flags;
-            typeAtt.SetType(token.Type());
-            offsetAtt.SetOffset(token.StartOffset(), token.EndOffset());
+            typeAtt.Type = token.Type;
+            offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
             posIncAtt.PositionIncrement = token.PositionIncrement;
             payloadAtt.Payload = token.Payload;
         }
@@ -155,10 +155,10 @@ namespace Lucene.Net.Analysis.Compound
                 return false;
 
             wrapper.SetTermBuffer(termAtt.TermBuffer(), 0, termAtt.TermLength());
-            wrapper.SetStartOffset(offsetAtt.StartOffset());
-            wrapper.SetEndOffset(offsetAtt.EndOffset());
+            wrapper.StartOffset = offsetAtt.StartOffset;
+            wrapper.EndOffset = offsetAtt.EndOffset;
             wrapper.Flags = flagsAtt.Flags;
-            wrapper.SetType(typeAtt.Type());
+            wrapper.Type = typeAtt.Type;
             wrapper.PositionIncrement = posIncAtt.PositionIncrement;
             wrapper.Payload = payloadAtt.Payload;
 
@@ -200,7 +200,7 @@ namespace Lucene.Net.Analysis.Compound
         protected Token CreateToken(int offset, int length,
             Token prototype)
         {
-            int newStart = prototype.StartOffset() + offset;
+            int newStart = prototype.StartOffset + offset;
             Token t = prototype.Clone(prototype.TermBuffer(), offset, length, newStart, newStart + length);
             t.PositionIncrement = 0;
             return t;

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Cz/CzechAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Cz/CzechAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Cz/CzechAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Cz/CzechAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -149,7 +149,7 @@ public sealed class CzechAnalyzer : Anal
      *             and {@link #CzechAnalyzer(Version, Set)} instead
      */
     public void LoadStopWords( Stream wordfile, System.Text.Encoding encoding ) {
-        SetPreviousTokenStream(null); // force a new stopfilter to be created
+        PreviousTokenStream = null; // force a new stopfilter to be created
         if ( wordfile == null )
         {
             stoptable = new HashSet<string>();
@@ -202,7 +202,7 @@ public sealed class CzechAnalyzer : Anal
      */
 	public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
     {
-      SavedStreams streams = (SavedStreams) GetPreviousTokenStream();
+      SavedStreams streams = (SavedStreams) PreviousTokenStream;
       if (streams == null) {
         streams = new SavedStreams();
         streams.source = new StandardTokenizer(matchVersion, reader);
@@ -210,7 +210,7 @@ public sealed class CzechAnalyzer : Anal
         streams.result = new LowerCaseFilter(streams.result);
         streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                         streams.result, stoptable);
-        SetPreviousTokenStream(streams);
+        PreviousTokenStream = streams;
       } else {
         streams.source.Reset(reader);
       }

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/De/GermanAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/De/GermanAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/De/GermanAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/De/GermanAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -1,4 +1,4 @@
-/*
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -167,7 +167,7 @@ namespace Lucene.Net.Analysis.De
         public void SetStemExclusionTable(String[] exclusionlist)
         {
             exclusionSet = StopFilter.MakeStopSet(exclusionlist);
-            SetPreviousTokenStream(null);
+            PreviousTokenStream = null;
         }
 
         /// <summary>
@@ -177,7 +177,7 @@ namespace Lucene.Net.Analysis.De
         public void SetStemExclusionTable(IDictionary<string, string> exclusionlist)
         {
             exclusionSet = new HashSet<string>(exclusionlist.Keys);
-            SetPreviousTokenStream(null);
+            PreviousTokenStream = null;
         }
 
         /// <summary>
@@ -187,7 +187,7 @@ namespace Lucene.Net.Analysis.De
         public void SetStemExclusionTable(FileInfo exclusionlist)
         {
             exclusionSet = WordlistLoader.GetWordSet(exclusionlist);
-            SetPreviousTokenStream(null);
+            PreviousTokenStream = null;
         }
 
         /// <summary>

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/El/GreekAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/El/GreekAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/El/GreekAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/El/GreekAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -1,4 +1,4 @@
-/*
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -156,7 +156,7 @@ namespace Lucene.Net.Analyzers.El
          */
         public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -164,7 +164,7 @@ namespace Lucene.Net.Analyzers.El
                 streams.result = new GreekLowerCaseFilter(streams.source);
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.result, stopSet);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Fa/PersianAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Fa/PersianAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Fa/PersianAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Fa/PersianAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -209,7 +209,7 @@ namespace Lucene.Net.Analyzers.Fa
          */
         public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -224,7 +224,7 @@ namespace Lucene.Net.Analyzers.Fa
                  */
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.result, stoptable);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Fr/FrenchAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Fr/FrenchAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Fr/FrenchAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Fr/FrenchAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -1,4 +1,4 @@
-/*
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -179,7 +179,7 @@ namespace Lucene.Net.Analysis.Fr
         public void SetStemExclusionTable(params string[] exclusionlist)
         {
             excltable = StopFilter.MakeStopSet(exclusionlist);
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -189,7 +189,7 @@ namespace Lucene.Net.Analysis.Fr
         public void SetStemExclusionTable(IDictionary<string, string> exclusionlist)
         {
             excltable = new HashSet<string>(exclusionlist.Keys);
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -200,7 +200,7 @@ namespace Lucene.Net.Analysis.Fr
         public void SetStemExclusionTable(FileInfo exclusionlist)
         {
             excltable = new HashSet<string>(WordlistLoader.GetWordSet(exclusionlist));
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -239,7 +239,7 @@ namespace Lucene.Net.Analysis.Fr
          */
         public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -250,7 +250,7 @@ namespace Lucene.Net.Analysis.Fr
                 streams.result = new FrenchStemFilter(streams.result, excltable);
                 // Convert to lowercase after stemming!
                 streams.result = new LowerCaseFilter(streams.result);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs Wed Mar 21 06:04:26 2012
@@ -46,15 +46,15 @@ namespace Lucene.Net.Analyzers.Miscellan
 
         public Token UpdateInputToken(Token inputToken, Token lastPrefixToken)
         {
-            inputToken.SetStartOffset(lastPrefixToken.EndOffset() + inputToken.StartOffset());
-            inputToken.SetEndOffset(lastPrefixToken.EndOffset() + inputToken.EndOffset());
+            inputToken.StartOffset = lastPrefixToken.EndOffset + inputToken.StartOffset;
+            inputToken.EndOffset = lastPrefixToken.EndOffset + inputToken.EndOffset;
             return inputToken;
         }
 
         public Token UpdateSuffixToken(Token suffixToken, Token lastInputToken)
         {
-            suffixToken.SetStartOffset(lastInputToken.EndOffset() + suffixToken.StartOffset());
-            suffixToken.SetEndOffset(lastInputToken.EndOffset() + suffixToken.EndOffset());
+            suffixToken.StartOffset = lastInputToken.EndOffset + suffixToken.StartOffset;
+            suffixToken.EndOffset = lastInputToken.EndOffset + suffixToken.EndOffset;
             return suffixToken;
         }
 

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAwareTokenStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAwareTokenStream.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAwareTokenStream.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Miscellaneous/PrefixAwareTokenStream.cs Wed Mar 21 06:04:26 2012
@@ -121,8 +121,8 @@ namespace Lucene.Net.Analyzers.Miscellan
             _termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
             _posIncrAtt.PositionIncrement = token.PositionIncrement;
             _flagsAtt.Flags =token.Flags;
-            _offsetAtt.SetOffset(token.StartOffset(), token.EndOffset());
-            _typeAtt.SetType(token.Type());
+            _offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
+            _typeAtt.Type = token.Type;
             _payloadAtt.Payload = token.Payload;
         }
 
@@ -132,8 +132,8 @@ namespace Lucene.Net.Analyzers.Miscellan
             token.SetTermBuffer(_pTermAtt.TermBuffer(), 0, _pTermAtt.TermLength());
             token.PositionIncrement = _pPosIncrAtt.PositionIncrement;
             token.Flags = _pFlagsAtt.Flags;
-            token.SetOffset(_pOffsetAtt.StartOffset(), _pOffsetAtt.EndOffset());
-            token.SetType(_pTypeAtt.Type());
+            token.SetOffset(_pOffsetAtt.StartOffset, _pOffsetAtt.EndOffset);
+            token.Type = _pTypeAtt.Type;
             token.Payload = _pPayloadAtt.Payload;
             return token;
         }
@@ -144,8 +144,8 @@ namespace Lucene.Net.Analyzers.Miscellan
             token.SetTermBuffer(_termAtt.TermBuffer(), 0, _termAtt.TermLength());
             token.PositionIncrement = _posIncrAtt.PositionIncrement;
             token.Flags = _flagsAtt.Flags;
-            token.SetOffset(_offsetAtt.StartOffset(), _offsetAtt.EndOffset());
-            token.SetType(_typeAtt.Type());
+            token.SetOffset(_offsetAtt.StartOffset, _offsetAtt.EndOffset);
+            token.Type = _typeAtt.Type;
             token.Payload = _payloadAtt.Payload;
             return token;
         }
@@ -158,8 +158,8 @@ namespace Lucene.Net.Analyzers.Miscellan
         /// <returns>consumer token</returns>
         public virtual Token UpdateSuffixToken(Token suffixToken, Token lastPrefixToken)
         {
-            suffixToken.SetStartOffset(lastPrefixToken.EndOffset() + suffixToken.StartOffset());
-            suffixToken.SetEndOffset(lastPrefixToken.EndOffset() + suffixToken.EndOffset());
+            suffixToken.StartOffset = lastPrefixToken.EndOffset + suffixToken.StartOffset;
+            suffixToken.EndOffset = lastPrefixToken.EndOffset + suffixToken.EndOffset;
             return suffixToken;
         }
 

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/EdgeNGramTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/EdgeNGramTokenFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/EdgeNGramTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/EdgeNGramTokenFilter.cs Wed Mar 21 06:04:26 2012
@@ -158,7 +158,7 @@ namespace Lucene.Net.Analysis.NGram
                         curTermBuffer = (char[])termAtt.TermBuffer().Clone();
                         curTermLength = termAtt.TermLength();
                         curGramSize = minGram;
-                        tokStart = offsetAtt.StartOffset();
+                        tokStart = offsetAtt.StartOffset;
                     }
                 }
                 if (curGramSize <= maxGram)

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/NGramTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/NGramTokenFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/NGramTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/NGram/NGramTokenFilter.cs Wed Mar 21 06:04:26 2012
@@ -95,7 +95,7 @@ namespace Lucene.Net.Analysis.NGram
                         curTermLength = termAtt.TermLength();
                         curGramSize = minGram;
                         curPos = 0;
-                        tokStart = offsetAtt.StartOffset();
+                        tokStart = offsetAtt.StartOffset;
                     }
                 }
                 while (curGramSize <= maxGram)

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Nl/DutchAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Nl/DutchAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Nl/DutchAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Nl/DutchAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -175,7 +175,7 @@ namespace Lucene.Net.Analysis.Nl
         public void SetStemExclusionTable(params string[] exclusionlist)
         {
             excltable = StopFilter.MakeStopSet(exclusionlist);
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -185,7 +185,7 @@ namespace Lucene.Net.Analysis.Nl
         public void SetStemExclusionTable(HashSet<string> exclusionlist)
         {
             excltable = exclusionlist;
-            SetPreviousTokenStream(null); // force a new stemmer to be created
+            PreviousTokenStream = null; // force a new stemmer to be created
         }
 
         /**
@@ -197,7 +197,7 @@ namespace Lucene.Net.Analysis.Nl
             try
             {
                 excltable = WordlistLoader.GetWordSet(exclusionlist);
-                SetPreviousTokenStream(null); // force a new stemmer to be created
+                PreviousTokenStream = null; // force a new stemmer to be created
             }
             catch (IOException e)
             {
@@ -216,7 +216,7 @@ namespace Lucene.Net.Analysis.Nl
             try
             {
                 stemdict = WordlistLoader.GetStemDict(stemdictFile);
-                SetPreviousTokenStream(null); // force a new stemmer to be created
+                PreviousTokenStream = null; // force a new stemmer to be created
             }
             catch (IOException e)
             {
@@ -267,7 +267,7 @@ namespace Lucene.Net.Analysis.Nl
                 return TokenStream(fieldName, reader);
             }
 
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -276,7 +276,7 @@ namespace Lucene.Net.Analysis.Nl
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.result, stoptable);
                 streams.result = new DutchStemFilter(streams.result, excltable, stemdict);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/NumericPayloadTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/NumericPayloadTokenFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/NumericPayloadTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/NumericPayloadTokenFilter.cs Wed Mar 21 06:04:26 2012
@@ -54,7 +54,7 @@ namespace Lucene.Net.Analyzers.Payloads
         {
             if (input.IncrementToken())
             {
-                if (typeAtt.Type().Equals(typeMatch))
+                if (typeAtt.Type.Equals(typeMatch))
                     payloadAtt.Payload = thePayload;
                 return true;
             }

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TokenOffsetPayloadTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TokenOffsetPayloadTokenFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TokenOffsetPayloadTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TokenOffsetPayloadTokenFilter.cs Wed Mar 21 06:04:26 2012
@@ -51,8 +51,8 @@ namespace Lucene.Net.Analyzers.Payloads
             if (input.IncrementToken())
             {
                 byte[] data = new byte[8];
-                PayloadHelper.EncodeInt(offsetAtt.StartOffset(), data, 0);
-                PayloadHelper.EncodeInt(offsetAtt.EndOffset(), data, 4);
+                PayloadHelper.EncodeInt(offsetAtt.StartOffset, data, 0);
+                PayloadHelper.EncodeInt(offsetAtt.EndOffset, data, 4);
                 Payload payload = new Payload(data);
                 payAtt.Payload = payload;
                 return true;

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TypeAsPayloadTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TypeAsPayloadTokenFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TypeAsPayloadTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Payloads/TypeAsPayloadTokenFilter.cs Wed Mar 21 06:04:26 2012
@@ -49,7 +49,7 @@ namespace Lucene.Net.Analyzers.Payloads
         {
             if (input.IncrementToken())
             {
-                String type = typeAtt.Type();
+                String type = typeAtt.Type;
                 if (type != null && type.Equals("") == false)
                 {
                     payloadAtt.Payload = new Payload(Encoding.UTF8.GetBytes(type));

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Query/QueryAutoStopWordAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Query/QueryAutoStopWordAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Query/QueryAutoStopWordAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Query/QueryAutoStopWordAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -133,7 +133,7 @@ public class QueryAutoStopWordAnalyzer :
    */
   public int AddStopWords(IndexReader reader, String fieldName, float maxPercentDocs) 
   {
-    return AddStopWords(reader, fieldName, (int) (reader.NumDocs * maxPercentDocs));
+    return AddStopWords(reader, fieldName, (int) (reader.GetNumDocs() * maxPercentDocs));
   }
 
   /**
@@ -170,7 +170,7 @@ public class QueryAutoStopWordAnalyzer :
     /* if the stopwords for a field are changed,
      * then saved streams for that field are erased.
      */
-    IDictionary<String,SavedStreams> streamMap = (IDictionary<String,SavedStreams>) GetPreviousTokenStream();
+    IDictionary<String,SavedStreams> streamMap = (IDictionary<String,SavedStreams>) PreviousTokenStream;
     if (streamMap != null)
       streamMap.Remove(fieldName);
     
@@ -213,10 +213,10 @@ public class QueryAutoStopWordAnalyzer :
     }
 
     /* map of SavedStreams for each field */
-    IDictionary<String, SavedStreams> streamMap = (IDictionary<String, SavedStreams>)GetPreviousTokenStream();
+    IDictionary<String, SavedStreams> streamMap = (IDictionary<String, SavedStreams>)PreviousTokenStream;
     if (streamMap == null) {
       streamMap = new HashMap<String, SavedStreams>();
-      SetPreviousTokenStream(streamMap);
+      PreviousTokenStream = streamMap;
     }
 
     SavedStreams streams = streamMap[fieldName];

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Ru/RussianAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Ru/RussianAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Ru/RussianAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Ru/RussianAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -151,7 +151,7 @@ namespace Lucene.Net.Analysis.Ru
          */
         public override TokenStream ReusableTokenStream(String fieldName, TextReader reader)
         {
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -160,7 +160,7 @@ namespace Lucene.Net.Analysis.Ru
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.result, stopSet);
                 streams.result = new RussianStemFilter(streams.result);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleAnalyzerWrapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleAnalyzerWrapper.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleAnalyzerWrapper.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleAnalyzerWrapper.cs Wed Mar 21 06:04:26 2012
@@ -137,13 +137,13 @@ namespace Lucene.Net.Analyzers.Shingle
                 return TokenStream(fieldName, reader);
             }
 
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
                 streams.wrapped = defaultAnalyzer.ReusableTokenStream(fieldName, reader);
                 streams.shingle = new ShingleFilter(streams.wrapped);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleFilter.cs Wed Mar 21 06:04:26 2012
@@ -201,8 +201,8 @@ namespace Lucene.Net.Analyzers.Shingle
                 if (shingleBufferPosition < shingleBuf.Count)
                 {
                     RestoreState(nextToken);
-                    typeAtt.SetType(tokenType);
-                    offsetAtt.SetOffset(offsetAtt.StartOffset(), endOffsets[shingleBufferPosition]);
+                    typeAtt.Type = tokenType;
+                    offsetAtt.SetOffset(offsetAtt.StartOffset, endOffsets[shingleBufferPosition]);
                     StringBuilder buf = shingles[shingleBufferPosition];
                     int termLength = buf.Length;
                     char[] TermBuffer = termAtt.TermBuffer();
@@ -269,7 +269,7 @@ namespace Lucene.Net.Analyzers.Shingle
                     }
                     numFillerTokensToInsert--;
                     // A filler token occupies no space
-                    offsetAtt.SetOffset(offsetAtt.StartOffset(), offsetAtt.StartOffset());
+                    offsetAtt.SetOffset(offsetAtt.StartOffset, offsetAtt.StartOffset);
                     termAtt.SetTermBuffer(FILLER_TOKEN, 0, FILLER_TOKEN.Length);
                     return true;
                 }
@@ -361,7 +361,7 @@ namespace Lucene.Net.Analyzers.Shingle
                     shingles[j].Append(termAtt.TermBuffer().Take(termAtt.TermLength()).ToArray());
                 }
 
-                endOffsets[i] = offsetAtt.EndOffset();
+                endOffsets[i] = offsetAtt.EndOffset;
                 i++;
             }
 

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleMatrixFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleMatrixFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleMatrixFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Shingle/ShingleMatrixFilter.cs Wed Mar 21 06:04:26 2012
@@ -319,8 +319,8 @@ namespace Lucene.Net.Analyzers.Shingle
             _termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
             _posIncrAtt.PositionIncrement = token.PositionIncrement;
             _flagsAtt.Flags = token.Flags;
-            _offsetAtt.SetOffset(token.StartOffset(), token.EndOffset());
-            _typeAtt.SetType(token.Type());
+            _offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
+            _typeAtt.Type = token.Type;
             _payloadAtt.Payload = token.Payload;
 
             return true;
@@ -333,8 +333,8 @@ namespace Lucene.Net.Analyzers.Shingle
             token.SetTermBuffer(_inTermAtt.TermBuffer(), 0, _inTermAtt.TermLength());
             token.PositionIncrement = _inPosIncrAtt.PositionIncrement;
             token.Flags = _inFlagsAtt.Flags;
-            token.SetOffset(_inOffsetAtt.StartOffset(), _inOffsetAtt.EndOffset());
-            token.SetType(_inTypeAtt.Type());
+            token.SetOffset(_inOffsetAtt.StartOffset, _inOffsetAtt.EndOffset);
+            token.Type = _inTypeAtt.Type;
             token.Payload = _inPayloadAtt.Payload;
             return token;
         }
@@ -345,8 +345,8 @@ namespace Lucene.Net.Analyzers.Shingle
             token.SetTermBuffer(_termAtt.TermBuffer(), 0, _termAtt.TermLength());
             token.PositionIncrement = _posIncrAtt.PositionIncrement;
             token.Flags = _flagsAtt.Flags;
-            token.SetOffset(_offsetAtt.StartOffset(), _offsetAtt.EndOffset());
-            token.SetType(_typeAtt.Type());
+            token.SetOffset(_offsetAtt.StartOffset, _offsetAtt.EndOffset);
+            token.Type = _typeAtt.Type;
             token.Payload = _payloadAtt.Payload;
             return token;
         }
@@ -529,11 +529,11 @@ namespace Lucene.Net.Analyzers.Shingle
         /// <param name="currentPermuationTokens">tokens of the current permutation of rows in the matrix. </param>
         public void UpdateToken(Token token, List<Token> shingle, int currentPermutationStartOffset, List<Row> currentPermutationRows, List<Token> currentPermuationTokens)
         {
-            token.SetType(typeof(ShingleMatrixFilter).Name);
+            token.Type = typeof(ShingleMatrixFilter).Name;
             token.Flags = 0;
             token.PositionIncrement = 1;
-            token.SetStartOffset((shingle[0]).StartOffset());
-            token.SetEndOffset(shingle[shingle.Count - 1].EndOffset());
+            token.StartOffset = (shingle[0]).StartOffset;
+            token.EndOffset = shingle[shingle.Count - 1].EndOffset;
 
             _settingsCodec.SetWeight(
                 token, 

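The same pattern applies to Token itself, as in the ShingleMatrixFilter hunks above. A minimal sketch of copying one Token into another with the property setters; both tokens are assumed to be supplied by the caller, everything else mirrors the hunk.

    using Lucene.Net.Analysis;

    static class TokenCopy
    {
        // Copies term text, increment, flags, offsets, type and payload
        // using the property forms introduced by this change.
        static void Copy(Token from, Token to)
        {
            to.SetTermBuffer(from.TermBuffer(), 0, from.TermLength());
            to.PositionIncrement = from.PositionIncrement;
            to.Flags = from.Flags;
            to.SetOffset(from.StartOffset, from.EndOffset); // was StartOffset()/EndOffset()
            to.Type = from.Type;                            // was SetType(from.Type())
            to.Payload = from.Payload;
        }
    }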
Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Sinks/TokenTypeSinkFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Sinks/TokenTypeSinkFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Sinks/TokenTypeSinkFilter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Sinks/TokenTypeSinkFilter.cs Wed Mar 21 06:04:26 2012
@@ -45,7 +45,7 @@ namespace Lucene.Net.Analysis.Sinks
                 typeAtt = source.AddAttribute<TypeAttribute>();
             }
 
-            return typeToMatch.Equals(typeAtt.Type());
+            return typeToMatch.Equals(typeAtt.Type);
         }
     }
 }

Modified: incubator/lucene.net/trunk/src/contrib/Analyzers/Th/ThaiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Analyzers/Th/ThaiAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Analyzers/Th/ThaiAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Analyzers/Th/ThaiAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -72,7 +72,7 @@ namespace Lucene.Net.Analysis.Th
                 return TokenStream(fieldName, reader);
             }
 
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -81,7 +81,7 @@ namespace Lucene.Net.Analysis.Th
                 streams.result = new ThaiWordFilter(streams.result);
                 streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                 streams.result, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/Core/Index/TermVectorEnumerator.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Core/Index/TermVectorEnumerator.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Core/Index/TermVectorEnumerator.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Core/Index/TermVectorEnumerator.cs Wed Mar 21 06:04:26 2012
@@ -27,7 +27,7 @@ namespace Lucene.Net.Index
     /// Class to allow for enumerating over the documents in the index to 
     /// retrieve the term vector for each one.
     /// </summary>
-    public class TermVectorEnumerator : IEnumerator<TermFreqVector>, IEnumerable<TermFreqVector>
+    public class TermVectorEnumerator : IEnumerator<ITermFreqVector>, IEnumerable<ITermFreqVector>
     {
         /// <summary>
         /// Current document being accessed.
@@ -63,7 +63,7 @@ namespace Lucene.Net.Index
 
         #region IEnumerator<TermFreqVector> Members
 
-        public TermFreqVector Current
+        public ITermFreqVector Current
         {
             get { return this.CurrentVector(); }
         }
@@ -101,9 +101,9 @@ namespace Lucene.Net.Index
 
         #region IEnumerable<TermFreqVector> Members
 
-        public IEnumerator<TermFreqVector> GetEnumerator()
+        public IEnumerator<ITermFreqVector> GetEnumerator()
         {
-            return (IEnumerator<TermFreqVector>)this;
+            return (IEnumerator<ITermFreqVector>)this;
         }
 
         #endregion
@@ -112,7 +112,7 @@ namespace Lucene.Net.Index
 
         IEnumerator IEnumerable.GetEnumerator()
         {
-            return (IEnumerator<TermFreqVector>)this;
+            return (IEnumerator<ITermFreqVector>)this;
         }
 
         #endregion
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
         /// Retrieve the current TermFreqVector from the index.
         /// </summary>
         /// <returns>The current TermFreqVector.</returns>
-        private TermFreqVector CurrentVector()
+        private ITermFreqVector CurrentVector()
         {
             if (this.reader.IsDeleted(this.document))
             {
@@ -129,7 +129,7 @@ namespace Lucene.Net.Index
             }
             else
             {
-                TermFreqVector vector = this.reader.GetTermFreqVector(this.document, this.fieldName);
+                ITermFreqVector vector = this.reader.GetTermFreqVector(this.document, this.fieldName);
                 if (vector == null)
                 {
                     vector = this.emptyVector;
@@ -144,7 +144,7 @@ namespace Lucene.Net.Index
     /// with a deleted document or a document that does not have the field
     /// that is being enumerated.
     /// </summary>
-    public class EmptyVector : TermFreqVector
+    public class EmptyVector : ITermFreqVector
     {
         private string field;
 
@@ -159,14 +159,14 @@ namespace Lucene.Net.Index
 
         #region TermFreqVector Members
 
-        public string GetField()
+        public string Field
         {
-            return this.field;
+            get { return this.field; }
         }
 
-        public int Size()
+        public int Size
         {
-            return 0;
+            get { return 0; }
         }
 
         public string[] GetTerms()

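A minimal sketch of reading the renamed ITermFreqVector shape (Field and Size are properties after this change). The field name "contents" and the console output are illustrative; GetTermFreqVector, GetTerms and GetTermFrequencies are the members used in the hunks above and below.

    using System;
    using Lucene.Net.Index;

    static class TermVectorDump
    {
        // Prints the term/frequency pairs stored for one document's field,
        // or returns quietly when no term vector was stored.
        static void Dump(IndexReader reader, int docId)
        {
            ITermFreqVector vector = reader.GetTermFreqVector(docId, "contents");
            if (vector == null) return; // field does not store term vector info

            Console.WriteLine(vector.Field + " has " + vector.Size + " terms");

            string[] terms = vector.GetTerms();
            int[] freqs = vector.GetTermFrequencies();
            for (int i = 0; i < vector.Size; i++)
                Console.WriteLine(terms[i] + ": " + freqs[i]);
        }
    }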
Modified: incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/FieldTermStack.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/FieldTermStack.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/FieldTermStack.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/FieldTermStack.cs Wed Mar 21 06:04:26 2012
@@ -80,7 +80,7 @@ namespace Lucene.Net.Search.Vectorhighli
             VectorHighlightMapper tfv = new VectorHighlightMapper(termSet);    
             reader.GetTermFreqVector(docId, fieldName, tfv);
             
-            if (tfv.Size()==0) return; // just return to make null snippets
+            if (tfv.Size==0) return; // just return to make null snippets
             
             string[] terms = tfv.GetTerms();
             foreach (String term in terms)

Modified: incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/VectorHighlightMapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/VectorHighlightMapper.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/VectorHighlightMapper.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/FastVectorHighlighter/VectorHighlightMapper.cs Wed Mar 21 06:04:26 2012
@@ -23,7 +23,7 @@ using Lucene.Net.Index;
 
 namespace Lucene.Net.Search.Vectorhighlight
 {
-    public class VectorHighlightMapper : TermVectorMapper, TermFreqVector, TermPositionVector
+    public class VectorHighlightMapper : TermVectorMapper, ITermFreqVector, TermPositionVector
     {
         private readonly List<string> _terms;
         private Dictionary<string, TermVectorOffsetInfo[]> _tvoi;
@@ -69,14 +69,14 @@ namespace Lucene.Net.Search.Vectorhighli
             }
         }
 
-        public string GetField()
+        public string Field
         {
-            return _field;
+            get { return _field; }
         }
 
-        public int Size()
+        public int Size
         {
-            return _tvoi.Count;
+            get { return _tvoi.Count; }
         }
 
         public string[] GetTerms()

Modified: incubator/lucene.net/trunk/src/contrib/Highlighter/QueryTermExtractor.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Highlighter/QueryTermExtractor.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Highlighter/QueryTermExtractor.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Highlighter/QueryTermExtractor.cs Wed Mar 21 06:04:26 2012
@@ -63,7 +63,7 @@ namespace Lucene.Net.Highlight
 		public static WeightedTerm[] GetIdfWeightedTerms(Query query, IndexReader reader, System.String fieldName)
 		{
 			WeightedTerm[] terms = GetTerms(query, false, fieldName);
-			int totalNumDocs = reader.NumDocs;
+			int totalNumDocs = reader.GetNumDocs();
 			for (int i = 0; i < terms.Length; i++)
 			{
 				try

Modified: incubator/lucene.net/trunk/src/contrib/Highlighter/SimpleFragmenter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Highlighter/SimpleFragmenter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Highlighter/SimpleFragmenter.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Highlighter/SimpleFragmenter.cs Wed Mar 21 06:04:26 2012
@@ -59,7 +59,7 @@ namespace Lucene.Net.Highlight
 		*/
 		public virtual bool IsNewFragment(Token token)
 		{
-			bool isNewFrag = token.EndOffset() >= (fragmentSize * currentNumFrags);
+			bool isNewFrag = token.EndOffset >= (fragmentSize * currentNumFrags);
 			if (isNewFrag)
 			{
 				currentNumFrags++;

Modified: incubator/lucene.net/trunk/src/contrib/Highlighter/TokenGroup.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Highlighter/TokenGroup.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Highlighter/TokenGroup.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Highlighter/TokenGroup.cs Wed Mar 21 06:04:26 2012
@@ -46,25 +46,25 @@ namespace Lucene.Net.Highlight
 			{
 				if (numTokens == 0)
 				{
-					startOffset = matchStartOffset = token.StartOffset();
-					endOffset = matchEndOffset = token.EndOffset();
+					startOffset = matchStartOffset = token.StartOffset;
+					endOffset = matchEndOffset = token.EndOffset;
 					tot += score;
 				}
 				else
 				{
-					startOffset = Math.Min(startOffset, token.StartOffset());
-					endOffset = Math.Max(endOffset, token.EndOffset());
+					startOffset = Math.Min(startOffset, token.StartOffset);
+					endOffset = Math.Max(endOffset, token.EndOffset);
 					if (score > 0)
 					{
 						if (tot == 0)
 						{
-							matchStartOffset = token.StartOffset();
-							matchEndOffset = token.EndOffset();
+							matchStartOffset = token.StartOffset;
+							matchEndOffset = token.EndOffset;
 						}
 						else
 						{
-							matchStartOffset = Math.Min(matchStartOffset, token.StartOffset());
-							matchEndOffset = Math.Max(matchEndOffset, token.EndOffset());
+							matchStartOffset = Math.Min(matchStartOffset, token.StartOffset);
+							matchEndOffset = Math.Max(matchEndOffset, token.EndOffset);
 						}
 						tot += score;
 					}
@@ -77,7 +77,7 @@ namespace Lucene.Net.Highlight
 		
 		internal virtual bool IsDistinct(Token token)
 		{
-			return token.StartOffset() >= endOffset;
+			return token.StartOffset >= endOffset;
 		}
 		
 		

Modified: incubator/lucene.net/trunk/src/contrib/Highlighter/TokenSources.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Highlighter/TokenSources.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Highlighter/TokenSources.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Highlighter/TokenSources.cs Wed Mar 21 06:04:26 2012
@@ -20,12 +20,12 @@
 */
 using System;
 using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Index;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using Token = Lucene.Net.Analysis.Token;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using Document = Lucene.Net.Documents.Document;
 using IndexReader = Lucene.Net.Index.IndexReader;
-using TermFreqVector = Lucene.Net.Index.TermFreqVector;
 using TermPositionVector = Lucene.Net.Index.TermPositionVector;
 using TermVectorOffsetInfo = Lucene.Net.Index.TermVectorOffsetInfo;
 
@@ -62,7 +62,7 @@ namespace Lucene.Net.Highlight
                 ClearAttributes();
                 Token token = tokens[currentToken++];
                 termAtt.SetTermBuffer(token.Term());
-                offsetAtt.SetOffset(token.StartOffset(), token.EndOffset());
+                offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
                 return true;
 			}
 
@@ -77,9 +77,9 @@ namespace Lucene.Net.Highlight
 			{
 				Token t1 = (Token) o1;
 				Token t2 = (Token) o2;
-				if (t1.StartOffset() > t2.StartOffset())
+				if (t1.StartOffset > t2.StartOffset)
 					return 1;
-				if (t1.StartOffset() < t2.StartOffset())
+				if (t1.StartOffset < t2.StartOffset)
 					return - 1;
 				return 0;
 			}
@@ -98,7 +98,7 @@ namespace Lucene.Net.Highlight
 		{
 			TokenStream ts = null;
 			
-			TermFreqVector tfv = (TermFreqVector) reader.GetTermFreqVector(docId, field);
+			ITermFreqVector tfv = (ITermFreqVector) reader.GetTermFreqVector(docId, field);
 			if (tfv != null)
 			{
 				if (tfv is TermPositionVector)
@@ -210,7 +210,7 @@ namespace Lucene.Net.Highlight
 		
 		public static TokenStream GetTokenStream(IndexReader reader, int docId, System.String field)
 		{
-			TermFreqVector tfv = (TermFreqVector) reader.GetTermFreqVector(docId, field);
+			ITermFreqVector tfv = (ITermFreqVector) reader.GetTermFreqVector(docId, field);
 			if (tfv == null)
 			{
 				throw new System.ArgumentException(field + " in doc #" + docId + "does not have any term position data stored");

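A sketch of the rebuilt-from-term-vector path the TokenSources hunks above touch: check that the stored vector carries positions, then let GetTokenStream reconstruct a TokenStream from it. The field name and the null fallback are illustrative; the calls themselves appear in the hunk.

    using Lucene.Net.Analysis;
    using Lucene.Net.Highlight;
    using Lucene.Net.Index;

    static class HighlightSource
    {
        static TokenStream StreamFor(IndexReader reader, int docId, string field)
        {
            ITermFreqVector tfv = reader.GetTermFreqVector(docId, field);
            if (tfv is TermPositionVector)
            {
                // Positions/offsets are stored, so a TokenStream can be rebuilt
                // without re-analyzing the document text.
                return TokenSources.GetTokenStream(reader, docId, field);
            }
            return null; // caller would have to re-analyze stored text instead
        }
    }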
Modified: incubator/lucene.net/trunk/src/contrib/Queries/FuzzyLikeThisQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Queries/FuzzyLikeThisQuery.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Queries/FuzzyLikeThisQuery.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Queries/FuzzyLikeThisQuery.cs Wed Mar 21 06:04:26 2012
@@ -193,7 +193,7 @@ namespace Lucene.Net.Search
             TokenStream ts = analyzer.TokenStream(f.fieldName, new System.IO.StringReader(f.queryString));
             TermAttribute termAtt = ts.AddAttribute<TermAttribute>();
 
-            int corpusNumDocs = reader.NumDocs;
+            int corpusNumDocs = reader.GetNumDocs();
             Term internSavingTemplateTerm = new Term(f.fieldName); //optimization to avoid constructing new Term() objects
             HashSet<string> processedTerms = new HashSet<string>();
             while (ts.IncrementToken())

Modified: incubator/lucene.net/trunk/src/contrib/Queries/Similar/MoreLikeThis.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Queries/Similar/MoreLikeThis.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Queries/Similar/MoreLikeThis.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Queries/Similar/MoreLikeThis.cs Wed Mar 21 06:04:26 2012
@@ -19,12 +19,12 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using Term = Lucene.Net.Index.Term;
-using TermFreqVector = Lucene.Net.Index.TermFreqVector;
 using BooleanClause = Lucene.Net.Search.BooleanClause;
 using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
 using TermQuery = Lucene.Net.Search.TermQuery;
@@ -403,7 +403,7 @@ namespace Lucene.Net.Search.Similar
         /// </param>
         public void SetMaxDocFreqPct(int maxPercentage)
         {
-            this.maxDocfreq = maxPercentage * ir.NumDocs / 100;
+            this.maxDocfreq = maxPercentage * ir.GetNumDocs() / 100;
         }
 
         /// <summary> Returns whether to boost terms in query based on "score" or not. The default is
@@ -675,7 +675,7 @@ namespace Lucene.Net.Search.Similar
         private PriorityQueue<object[]> CreateQueue(IDictionary<string,Int> words)
         {
             // have collected all words in doc and their freqs
-            int numDocs = ir.NumDocs;
+            int numDocs = ir.GetNumDocs();
             FreqQ res = new FreqQ(words.Count); // will order words by score
 
             var it = words.Keys.GetEnumerator();
@@ -777,7 +777,7 @@ namespace Lucene.Net.Search.Similar
             System.IO.StreamWriter o = temp_writer;
             FSDirectory dir = FSDirectory.Open(new DirectoryInfo(indexName));
             IndexReader r = IndexReader.Open(dir, true);
-            o.WriteLine("Open index " + indexName + " which has " + r.NumDocs + " docs");
+            o.WriteLine("Open index " + indexName + " which has " + r.GetNumDocs() + " docs");
 
             MoreLikeThis mlt = new MoreLikeThis(r);
 
@@ -830,7 +830,7 @@ namespace Lucene.Net.Search.Similar
             for (int i = 0; i < fieldNames.Length; i++)
             {
                 System.String fieldName = fieldNames[i];
-                TermFreqVector vector = ir.GetTermFreqVector(docNum, fieldName);
+                ITermFreqVector vector = ir.GetTermFreqVector(docNum, fieldName);
 
                 // field does not store term vector info
                 if (vector == null)
@@ -859,7 +859,7 @@ namespace Lucene.Net.Search.Similar
         /// </param>
         /// <param name="vector">List of terms and their frequencies for a doc/field
         /// </param>
-        private void AddTermFrequencies(IDictionary<string, Int> termFreqMap, TermFreqVector vector)
+        private void AddTermFrequencies(IDictionary<string, Int> termFreqMap, ITermFreqVector vector)
         {
             System.String[] terms = vector.GetTerms();
             int[] freqs = vector.GetTermFrequencies();

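A sketch of a caller hitting the members changed above. The MoreLikeThis constructor and SetMaxDocFreqPct (which now scales by reader.GetNumDocs() internally) come from the hunks; the Like(docNum) call and the percentage value are assumptions about the rest of the class, not part of this commit.

    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Search.Similar;

    static class MltExample
    {
        // Builds a "more like this" query for an existing document.
        static Query QueryFor(IndexReader reader, int docNum)
        {
            MoreLikeThis mlt = new MoreLikeThis(reader);
            mlt.SetMaxDocFreqPct(75);   // ignore terms in more than 75% of docs
            return mlt.Like(docNum);    // assumed overload; not shown in this diff
        }
    }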
Modified: incubator/lucene.net/trunk/src/contrib/Similarity/Similar/MoreLikeThis.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Similarity/Similar/MoreLikeThis.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Similarity/Similar/MoreLikeThis.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Similarity/Similar/MoreLikeThis.cs Wed Mar 21 06:04:26 2012
@@ -18,12 +18,12 @@
 using System;
 using System.IO;
 using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using Term = Lucene.Net.Index.Term;
-using TermFreqVector = Lucene.Net.Index.TermFreqVector;
 using BooleanClause = Lucene.Net.Search.BooleanClause;
 using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
 using TermQuery = Lucene.Net.Search.TermQuery;
@@ -611,7 +611,7 @@ namespace Similarity.Net
         private PriorityQueue<object[]> CreateQueue(System.Collections.IDictionary words)
         {
             // have collected all words in doc and their freqs
-            int numDocs = ir.NumDocs;
+            int numDocs = ir.GetNumDocs();
             FreqQ res = new FreqQ(words.Count); // will order words by score
 			
             System.Collections.IEnumerator it = words.Keys.GetEnumerator();
@@ -706,7 +706,7 @@ namespace Similarity.Net
                               {AutoFlush = true};
             var dir = FSDirectory.Open(new DirectoryInfo(indexName));
             IndexReader r = IndexReader.Open(dir, true);
-            o.WriteLine("Open index " + indexName + " which has " + r.NumDocs + " docs");
+            o.WriteLine("Open index " + indexName + " which has " + r.GetNumDocs() + " docs");
 			
             MoreLikeThis mlt = new MoreLikeThis(r);
 			
@@ -760,7 +760,7 @@ namespace Similarity.Net
             for (int i = 0; i < fieldNames.Length; i++)
             {
                 System.String fieldName = fieldNames[i];
-                TermFreqVector vector = ir.GetTermFreqVector(docNum, fieldName);
+                ITermFreqVector vector = ir.GetTermFreqVector(docNum, fieldName);
 				
                 // field does not store term vector info
                 if (vector == null)
@@ -789,7 +789,7 @@ namespace Similarity.Net
         /// </param>
         /// <param name="vector">List of terms and their frequencies for a doc/field
         /// </param>
-        private void  AddTermFrequencies(System.Collections.IDictionary termFreqMap, TermFreqVector vector)
+        private void  AddTermFrequencies(System.Collections.IDictionary termFreqMap, ITermFreqVector vector)
         {
             System.String[] terms = vector.GetTerms();
             int[] freqs = vector.GetTermFrequencies();

Modified: incubator/lucene.net/trunk/src/contrib/Snowball/Lucene.Net/Analysis/Snowball/SnowballAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/Snowball/Lucene.Net/Analysis/Snowball/SnowballAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/Snowball/Lucene.Net/Analysis/Snowball/SnowballAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/Snowball/Lucene.Net/Analysis/Snowball/SnowballAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -102,7 +102,7 @@ namespace Lucene.Net.Analysis.Snowball
                 return TokenStream(fieldName, reader);
             }
 
-            SavedStreams streams = (SavedStreams)GetPreviousTokenStream();
+            SavedStreams streams = (SavedStreams)PreviousTokenStream;
             if (streams == null)
             {
                 streams = new SavedStreams();
@@ -113,7 +113,7 @@ namespace Lucene.Net.Analysis.Snowball
                     streams.result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion),
                                                     streams.result, stopSet);
                 streams.result = new SnowballFilter(streams.result, name);
-                SetPreviousTokenStream(streams);
+                PreviousTokenStream = streams;
             }
             else
             {

Modified: incubator/lucene.net/trunk/src/contrib/SpellChecker/Spell/SpellChecker.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/contrib/SpellChecker/Spell/SpellChecker.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/contrib/SpellChecker/Spell/SpellChecker.cs (original)
+++ incubator/lucene.net/trunk/src/contrib/SpellChecker/Spell/SpellChecker.cs Wed Mar 21 06:04:26 2012
@@ -396,7 +396,7 @@ namespace SpellChecker.Net.Search.Spell
                 EnsureOpen();
                 Directory dir = this.spellindex;
                 IndexWriter writer = new IndexWriter(spellindex, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
-                writer.SetMergeFactor(mergeFactor);
+                writer.MergeFactor = mergeFactor;
                 writer.SetMaxBufferedDocs(ramMB);
 
                 System.Collections.IEnumerator iter = dict.GetWordsIterator();

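A minimal sketch of the writer configuration style after this change: MergeFactor is assigned as a property while SetMaxBufferedDocs stays a method. The Directory argument, analyzer choice and numeric values are illustrative; the constructor and both members appear in the hunk above.

    using Lucene.Net.Analysis;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    static class WriterSetup
    {
        static IndexWriter Open(Directory dir)
        {
            var writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            writer.MergeFactor = 300;        // was: writer.SetMergeFactor(300)
            writer.SetMaxBufferedDocs(150);  // unchanged method call
            return writer;
        }
    }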
Modified: incubator/lucene.net/trunk/src/core/Analysis/Analyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Analysis/Analyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Analysis/Analyzer.cs (original)
+++ incubator/lucene.net/trunk/src/core/Analysis/Analyzer.cs Wed Mar 21 06:04:26 2012
@@ -52,52 +52,31 @@ namespace Lucene.Net.Analysis
 		private CloseableThreadLocal<Object> tokenStreams = new CloseableThreadLocal<Object>();
 	    private bool isDisposed;
 
-        /// <summary>Used by Analyzers that implement reusableTokenStream
-        /// to save a TokenStream for later re-use by the same
-        /// thread. 
-        /// </summary>
+	    /// <summary>Used by Analyzers that implement reusableTokenStream
+	    /// to retrieve previously saved TokenStreams for re-use
+	    /// by the same thread. 
+	    /// </summary>
 	    protected internal virtual object PreviousTokenStream
 	    {
 	        get
-            {
-                if (tokenStreams == null)
-                {
-                    throw new AlreadyClosedException("this Analyzer is closed");
-                }
-                return tokenStreams.Get();
+	        {
+	            if (tokenStreams == null)
+	            {
+	                throw new AlreadyClosedException("this Analyzer is closed");
+	            }
+	            return tokenStreams.Get();
+	        }
+	        set
+	        {
+	            if (tokenStreams == null)
+	            {
+	                throw new AlreadyClosedException("this Analyzer is closed");
+	            }
+	            tokenStreams.Set(value);
 	        }
-
-            set
-            {
-                if (tokenStreams == null)
-                {
-                    throw new AlreadyClosedException("this Analyzer is closed");
-                }
-                tokenStreams.Set(value);
-            }
 	    }
 
-		/// <summary>Used by Analyzers that implement reusableTokenStream
-		/// to retrieve previously saved TokenStreams for re-use
-		/// by the same thread. 
-        /// </summary>
-        [Obsolete("Use PreviousTokenStream property instead")]
-		protected internal virtual System.Object GetPreviousTokenStream()
-		{
-		    return PreviousTokenStream;
-		}
-		
-		/// <summary>Used by Analyzers that implement reusableTokenStream
-		/// to save a TokenStream for later re-use by the same
-		/// thread. 
-		/// </summary>
-		[Obsolete("Use PreviousTokenStream property instead")]
-		protected internal virtual void  SetPreviousTokenStream(System.Object obj)
-		{
-		    PreviousTokenStream = obj;
-		}
-		
-        [Obsolete()]
+	    [Obsolete()]
 		protected internal bool overridesTokenStreamMethod = false;
 		
 		/// <deprecated> This is only present to preserve

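A minimal analyzer sketch of the PreviousTokenStream pattern that the ThaiAnalyzer, SnowballAnalyzer and KeywordAnalyzer hunks rely on: read the thread-local stream, create and store it on first use, reset it afterwards. The class, its name and WhitespaceTokenizer are illustrative; only the property reads/writes mirror this commit.

    using System.IO;
    using Lucene.Net.Analysis;

    public sealed class SimpleReusableAnalyzer : Analyzer
    {
        public override TokenStream TokenStream(string fieldName, TextReader reader)
        {
            return new WhitespaceTokenizer(reader);
        }

        public override TokenStream ReusableTokenStream(string fieldName, TextReader reader)
        {
            Tokenizer tokenizer = (Tokenizer)PreviousTokenStream;  // was GetPreviousTokenStream()
            if (tokenizer == null)
            {
                tokenizer = new WhitespaceTokenizer(reader);
                PreviousTokenStream = tokenizer;                   // was SetPreviousTokenStream(...)
            }
            else
            {
                tokenizer.Reset(reader);
            }
            return tokenizer;
        }
    }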
Modified: incubator/lucene.net/trunk/src/core/Analysis/KeywordAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Analysis/KeywordAnalyzer.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Analysis/KeywordAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/src/core/Analysis/KeywordAnalyzer.cs Wed Mar 21 06:04:26 2012
@@ -42,11 +42,11 @@ namespace Lucene.Net.Analysis
 				// tokenStream but not reusableTokenStream
 				return TokenStream(fieldName, reader);
 			}
-			Tokenizer tokenizer = (Tokenizer) GetPreviousTokenStream();
+			Tokenizer tokenizer = (Tokenizer) PreviousTokenStream;
 			if (tokenizer == null)
 			{
 				tokenizer = new KeywordTokenizer(reader);
-				SetPreviousTokenStream(tokenizer);
+				PreviousTokenStream = tokenizer;
 			}
 			else
 				tokenizer.Reset(reader);

Modified: incubator/lucene.net/trunk/src/core/Analysis/NumericTokenStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Analysis/NumericTokenStream.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Analysis/NumericTokenStream.cs (original)
+++ incubator/lucene.net/trunk/src/core/Analysis/NumericTokenStream.cs Wed Mar 21 06:04:26 2012
@@ -246,7 +246,7 @@ namespace Lucene.Net.Analysis
 				
 			}
 			
-			typeAtt.SetType((shift == 0)?TOKEN_TYPE_FULL_PREC:TOKEN_TYPE_LOWER_PREC);
+			typeAtt.Type = (shift == 0)?TOKEN_TYPE_FULL_PREC:TOKEN_TYPE_LOWER_PREC;
 			posIncrAtt.PositionIncrement = (shift == 0)?1:0;
 			shift += precisionStep;
 			return true;

Modified: incubator/lucene.net/trunk/src/core/Analysis/PerFieldAnalyzerWrapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Analysis/PerFieldAnalyzerWrapper.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Analysis/PerFieldAnalyzerWrapper.cs (original)
+++ incubator/lucene.net/trunk/src/core/Analysis/PerFieldAnalyzerWrapper.cs Wed Mar 21 06:04:26 2012
@@ -122,10 +122,8 @@ namespace Lucene.Net.Analysis
 		/// <summary>Return the positionIncrementGap from the analyzer assigned to fieldName </summary>
 		public override int GetPositionIncrementGap(string fieldName)
 		{
-			Analyzer analyzer = analyzerMap[fieldName];
-			if (analyzer == null)
-				analyzer = defaultAnalyzer;
-			return analyzer.GetPositionIncrementGap(fieldName);
+			Analyzer analyzer = analyzerMap[fieldName] ?? defaultAnalyzer;
+		    return analyzer.GetPositionIncrementGap(fieldName);
 		}
 
         /// <summary> Return the offsetGap from the analyzer assigned to field </summary>

Modified: incubator/lucene.net/trunk/src/core/Analysis/PorterStemFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Analysis/PorterStemFilter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Analysis/PorterStemFilter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Analysis/PorterStemFilter.cs Wed Mar 21 06:04:26 2012
@@ -57,7 +57,7 @@ namespace Lucene.Net.Analysis
 				return false;
 			
 			if (stemmer.Stem(termAtt.TermBuffer(), 0, termAtt.TermLength()))
-				termAtt.SetTermBuffer(stemmer.GetResultBuffer(), 0, stemmer.GetResultLength());
+				termAtt.SetTermBuffer(stemmer.ResultBuffer, 0, stemmer.ResultLength);
 			return true;
 		}
 	}