Posted to commits@lucenenet.apache.org by sy...@apache.org on 2014/09/16 22:20:51 UTC

[3/7] .NETification
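
The hunks below mechanically replace Java-style accessor calls (MaxDoc(), NumDocs(), Leaves(), Reader(), Fields(), HasDeletions()) in the test code with the C# properties these members were converted to. As a rough sketch of that convention only (ToyReader is a made-up type for illustration, not part of Lucene.NET; the patch applies the same change to IndexWriter, IndexReader, AtomicReaderContext, and friends):

    // Illustration of the .NETification convention assumed by this patch:
    // a Java-style getter method becomes an idiomatic C# read-only property.
    public class ToyReader
    {
        private readonly int maxDoc;

        public ToyReader(int maxDoc)
        {
            this.maxDoc = maxDoc;
        }

        // Before: Java-style accessor method
        //   public int MaxDoc() { return maxDoc; }

        // After: C# property, so call sites drop the parentheses
        public int MaxDoc
        {
            get { return maxDoc; }
        }
    }

Call sites therefore change from Assert.AreEqual(100, writer.MaxDoc()); to Assert.AreEqual(100, writer.MaxDoc); which is exactly the -/+ pattern repeated throughout the hunks below.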

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestAddIndexes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/core/Index/TestAddIndexes.cs
index 2e5b133..ff95cca 100644
--- a/src/Lucene.Net.Tests/core/Index/TestAddIndexes.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestAddIndexes.cs
@@ -68,27 +68,27 @@ namespace Lucene.Net.Index
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE));
             // add 100 documents
             AddDocs(writer, 100);
-            Assert.AreEqual(100, writer.MaxDoc());
+            Assert.AreEqual(100, writer.MaxDoc);
             writer.Dispose();
             TestUtil.CheckIndex(dir);
 
             writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetMergePolicy(NewLogMergePolicy(false)));
             // add 40 documents in separate files
             AddDocs(writer, 40);
-            Assert.AreEqual(40, writer.MaxDoc());
+            Assert.AreEqual(40, writer.MaxDoc);
             writer.Dispose();
 
             writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE));
             // add 50 documents in compound files
             AddDocs2(writer, 50);
-            Assert.AreEqual(50, writer.MaxDoc());
+            Assert.AreEqual(50, writer.MaxDoc);
             writer.Dispose();
 
             // test doc count before segments are merged
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND));
-            Assert.AreEqual(100, writer.MaxDoc());
+            Assert.AreEqual(100, writer.MaxDoc);
             writer.AddIndexes(aux, aux2);
-            Assert.AreEqual(190, writer.MaxDoc());
+            Assert.AreEqual(190, writer.MaxDoc);
             writer.Dispose();
             TestUtil.CheckIndex(dir);
 
@@ -103,14 +103,14 @@ namespace Lucene.Net.Index
             writer = NewWriter(aux3, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
             // add 40 documents
             AddDocs(writer, 40);
-            Assert.AreEqual(40, writer.MaxDoc());
+            Assert.AreEqual(40, writer.MaxDoc);
             writer.Dispose();
 
             // test doc count before segments are merged
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND));
-            Assert.AreEqual(190, writer.MaxDoc());
+            Assert.AreEqual(190, writer.MaxDoc);
             writer.AddIndexes(aux3);
-            Assert.AreEqual(230, writer.MaxDoc());
+            Assert.AreEqual(230, writer.MaxDoc);
             writer.Dispose();
 
             // make sure the new index is correct
@@ -139,9 +139,9 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND));
-            Assert.AreEqual(230, writer.MaxDoc());
+            Assert.AreEqual(230, writer.MaxDoc);
             writer.AddIndexes(aux4);
-            Assert.AreEqual(231, writer.MaxDoc());
+            Assert.AreEqual(231, writer.MaxDoc);
             writer.Dispose();
 
             VerifyNumDocs(dir, 231);
@@ -289,7 +289,7 @@ namespace Lucene.Net.Index
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
             // add 100 documents
             AddDocs(writer, 100);
-            Assert.AreEqual(100, writer.MaxDoc());
+            Assert.AreEqual(100, writer.MaxDoc);
             writer.Dispose();
 
             writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
@@ -309,7 +309,7 @@ namespace Lucene.Net.Index
             }
             catch (System.ArgumentException e)
             {
-                Assert.AreEqual(100, writer.MaxDoc());
+                Assert.AreEqual(100, writer.MaxDoc);
             }
             writer.Dispose();
 
@@ -336,7 +336,7 @@ namespace Lucene.Net.Index
             AddDocs(writer, 10);
 
             writer.AddIndexes(aux);
-            Assert.AreEqual(1040, writer.MaxDoc());
+            Assert.AreEqual(1040, writer.MaxDoc);
             Assert.AreEqual(1000, writer.GetDocCount(0));
             writer.Dispose();
 
@@ -361,7 +361,7 @@ namespace Lucene.Net.Index
             AddDocs(writer, 2);
 
             writer.AddIndexes(aux);
-            Assert.AreEqual(1032, writer.MaxDoc());
+            Assert.AreEqual(1032, writer.MaxDoc);
             Assert.AreEqual(1000, writer.GetDocCount(0));
             writer.Dispose();
 
@@ -385,7 +385,7 @@ namespace Lucene.Net.Index
             IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
 
             writer.AddIndexes(aux, new MockDirectoryWrapper(Random(), new RAMDirectory(aux, NewIOContext(Random()))));
-            Assert.AreEqual(1060, writer.MaxDoc());
+            Assert.AreEqual(1060, writer.MaxDoc);
             Assert.AreEqual(1000, writer.GetDocCount(0));
             writer.Dispose();
 
@@ -414,7 +414,7 @@ namespace Lucene.Net.Index
             }
             writer.Dispose();
             IndexReader reader = DirectoryReader.Open(aux);
-            Assert.AreEqual(10, reader.NumDocs());
+            Assert.AreEqual(10, reader.NumDocs);
             reader.Dispose();
 
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(4).SetMergePolicy(NewLogMergePolicy(4)));
@@ -424,7 +424,7 @@ namespace Lucene.Net.Index
                 Console.WriteLine("\nTEST: now addIndexes");
             }
             writer.AddIndexes(aux, new MockDirectoryWrapper(Random(), new RAMDirectory(aux, NewIOContext(Random()))));
-            Assert.AreEqual(1020, writer.MaxDoc());
+            Assert.AreEqual(1020, writer.MaxDoc);
             Assert.AreEqual(1000, writer.GetDocCount(0));
             writer.Dispose();
             dir.Dispose();
@@ -445,7 +445,7 @@ namespace Lucene.Net.Index
 
             IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10)));
             writer.AddIndexes(aux);
-            Assert.AreEqual(30, writer.MaxDoc());
+            Assert.AreEqual(30, writer.MaxDoc);
             Assert.AreEqual(3, writer.SegmentCount);
             writer.Dispose();
 
@@ -457,7 +457,7 @@ namespace Lucene.Net.Index
             }
             writer.Dispose();
             IndexReader reader = DirectoryReader.Open(aux);
-            Assert.AreEqual(3, reader.NumDocs());
+            Assert.AreEqual(3, reader.NumDocs);
             reader.Dispose();
 
             dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
@@ -468,13 +468,13 @@ namespace Lucene.Net.Index
             }
             writer.Dispose();
             reader = DirectoryReader.Open(aux2);
-            Assert.AreEqual(22, reader.NumDocs());
+            Assert.AreEqual(22, reader.NumDocs);
             reader.Dispose();
 
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4)));
 
             writer.AddIndexes(aux, aux2);
-            Assert.AreEqual(1040, writer.MaxDoc());
+            Assert.AreEqual(1040, writer.MaxDoc);
             Assert.AreEqual(1000, writer.GetDocCount(0));
             writer.Dispose();
             dir.Dispose();
@@ -512,8 +512,8 @@ namespace Lucene.Net.Index
         private void VerifyNumDocs(Directory dir, int numDocs)
         {
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(numDocs, reader.MaxDoc());
-            Assert.AreEqual(numDocs, reader.NumDocs());
+            Assert.AreEqual(numDocs, reader.MaxDoc);
+            Assert.AreEqual(numDocs, reader.NumDocs);
             reader.Dispose();
         }
 
@@ -549,7 +549,7 @@ namespace Lucene.Net.Index
             {
                 AddDocs(writer, 1000);
             }
-            Assert.AreEqual(1000, writer.MaxDoc());
+            Assert.AreEqual(1000, writer.MaxDoc);
             Assert.AreEqual(1, writer.SegmentCount);
             writer.Dispose();
 
@@ -568,7 +568,7 @@ namespace Lucene.Net.Index
                 writer.Dispose();
                 writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
             }
-            Assert.AreEqual(30, writer.MaxDoc());
+            Assert.AreEqual(30, writer.MaxDoc);
             Assert.AreEqual(3, writer.SegmentCount);
             writer.Dispose();
         }
@@ -840,7 +840,7 @@ namespace Lucene.Net.Index
             Assert.IsTrue(c.Failures.Count == 0, "found unexpected failures: " + c.Failures);
 
             IndexReader reader = DirectoryReader.Open(c.Dir2);
-            Assert.AreEqual(expectedNumDocs, reader.NumDocs());
+            Assert.AreEqual(expectedNumDocs, reader.NumDocs);
             reader.Dispose();
 
             c.CloseDir();
@@ -1096,7 +1096,7 @@ namespace Lucene.Net.Index
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetCodec(codec));
             // add 100 documents
             AddDocsWithID(writer, 100, 0);
-            Assert.AreEqual(100, writer.MaxDoc());
+            Assert.AreEqual(100, writer.MaxDoc);
             writer.Commit();
             writer.Dispose();
             TestUtil.CheckIndex(dir);
@@ -1104,22 +1104,22 @@ namespace Lucene.Net.Index
             writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetCodec(codec).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false)));
             // add 40 documents in separate files
             AddDocs(writer, 40);
-            Assert.AreEqual(40, writer.MaxDoc());
+            Assert.AreEqual(40, writer.MaxDoc);
             writer.Commit();
             writer.Dispose();
 
             writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetCodec(codec));
             // add 40 documents in compound files
             AddDocs2(writer, 50);
-            Assert.AreEqual(50, writer.MaxDoc());
+            Assert.AreEqual(50, writer.MaxDoc);
             writer.Commit();
             writer.Dispose();
 
             // test doc count before segments are merged
             writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetCodec(codec));
-            Assert.AreEqual(100, writer.MaxDoc());
+            Assert.AreEqual(100, writer.MaxDoc);
             writer.AddIndexes(aux, aux2);
-            Assert.AreEqual(190, writer.MaxDoc());
+            Assert.AreEqual(190, writer.MaxDoc);
             writer.Dispose();
 
             dir.Dispose();
@@ -1243,7 +1243,7 @@ namespace Lucene.Net.Index
                 }
                 w.Dispose();
                 IndexReader open = DirectoryReader.Open(dir);
-                Assert.AreEqual(0, open.NumDocs());
+                Assert.AreEqual(0, open.NumDocs);
                 open.Dispose();
                 dir.Dispose();
             }*/
@@ -1292,7 +1292,7 @@ namespace Lucene.Net.Index
 
             IndexReader r3 = w.Reader;
             w.Dispose();
-            Assert.AreEqual(2, r3.NumDocs());
+            Assert.AreEqual(2, r3.NumDocs);
             for (int docID = 0; docID < 2; docID++)
             {
                 Document d = r3.Document(docID);
@@ -1318,9 +1318,9 @@ namespace Lucene.Net.Index
             w.AddIndexes(empty);
             w.Dispose();
             DirectoryReader dr = DirectoryReader.Open(d1);
-            foreach (AtomicReaderContext ctx in dr.Leaves())
+            foreach (AtomicReaderContext ctx in dr.Leaves)
             {
-                Assert.IsTrue(ctx.Reader().MaxDoc() > 0, "empty segments should be dropped by addIndexes");
+                Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
             }
             dr.Dispose();
             d1.Dispose();
@@ -1336,16 +1336,16 @@ namespace Lucene.Net.Index
             Directory src = NewDirectory(), dest = NewDirectory();
             RandomIndexWriter w = new RandomIndexWriter(Random(), src);
             w.AddDocument(new Document());
-            IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.Reader.Leaves()[0].Reader());
+            IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.Reader.Leaves[0].Reader);
             w.Dispose();
 
             w = new RandomIndexWriter(Random(), dest);
             w.AddIndexes(allDeletedReader);
             w.Dispose();
             DirectoryReader dr = DirectoryReader.Open(src);
-            foreach (AtomicReaderContext ctx in dr.Leaves())
+            foreach (AtomicReaderContext ctx in dr.Leaves)
             {
-                Assert.IsTrue(ctx.Reader().MaxDoc() > 0, "empty segments should be dropped by addIndexes");
+                Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
             }
             dr.Dispose();
             allDeletedReader.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestAtomicUpdate.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestAtomicUpdate.cs b/src/Lucene.Net.Tests/core/Index/TestAtomicUpdate.cs
index d662e56..41b63b2 100644
--- a/src/Lucene.Net.Tests/core/Index/TestAtomicUpdate.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestAtomicUpdate.cs
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
             public override void DoWork()
             {
                 IndexReader r = DirectoryReader.Open(Directory);
-                Assert.AreEqual(100, r.NumDocs());
+                Assert.AreEqual(100, r.NumDocs);
                 r.Dispose();
             }
         }
@@ -154,7 +154,7 @@ namespace Lucene.Net.Index
             writer.Commit();
 
             IndexReader r = DirectoryReader.Open(directory);
-            Assert.AreEqual(100, r.NumDocs());
+            Assert.AreEqual(100, r.NumDocs);
             r.Dispose();
 
             IndexerThread indexerThread = new IndexerThread(writer, threads);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestBagOfPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/core/Index/TestBagOfPositions.cs
index 3579f5a..e7cf985 100644
--- a/src/Lucene.Net.Tests/core/Index/TestBagOfPositions.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestBagOfPositions.cs
@@ -128,8 +128,8 @@ namespace Lucene.Net.Index
 
             iw.ForceMerge(1);
             DirectoryReader ir = iw.Reader;
-            Assert.AreEqual(1, ir.Leaves().Count);
-            AtomicReader air = (AtomicReader)ir.Leaves()[0].Reader();
+            Assert.AreEqual(1, ir.Leaves.Count);
+            AtomicReader air = (AtomicReader)ir.Leaves[0].Reader;
             Terms terms = air.Terms("field");
             // numTerms-1 because there cannot be a term 0 with 0 postings:
             Assert.AreEqual(numTerms - 1, terms.Size());

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestBagOfPostings.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/core/Index/TestBagOfPostings.cs
index b1a0c67..38d6886 100644
--- a/src/Lucene.Net.Tests/core/Index/TestBagOfPostings.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestBagOfPostings.cs
@@ -104,11 +104,11 @@ namespace Lucene.Net.Index
 
             iw.ForceMerge(1);
             DirectoryReader ir = iw.Reader;
-            Assert.AreEqual(1, ir.Leaves().Count);
-            AtomicReader air = (AtomicReader)ir.Leaves()[0].Reader();
+            Assert.AreEqual(1, ir.Leaves.Count);
+            AtomicReader air = (AtomicReader)ir.Leaves[0].Reader;
             Terms terms = air.Terms("field");
             // numTerms-1 because there cannot be a term 0 with 0 postings:
-            Assert.AreEqual(numTerms - 1, air.Fields().UniqueTermCount);
+            Assert.AreEqual(numTerms - 1, air.Fields.UniqueTermCount);
             if (iwc.Codec is Lucene3xCodec == false)
             {
                 Assert.AreEqual(numTerms - 1, terms.Size());

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestBinaryDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/core/Index/TestBinaryDocValuesUpdates.cs
index 4af1950..9cc5f21 100644
--- a/src/Lucene.Net.Tests/core/Index/TestBinaryDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestBinaryDocValuesUpdates.cs
@@ -143,8 +143,8 @@ namespace Lucene.Net.Index
                 writer.Dispose();
             }
 
-            Assert.AreEqual(1, reader.Leaves().Count);
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            Assert.AreEqual(1, reader.Leaves.Count);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             BinaryDocValues bdv = r.GetBinaryDocValues("val");
             BytesRef scratch = new BytesRef();
             Assert.AreEqual(2, GetValue(bdv, 0, scratch));
@@ -195,12 +195,12 @@ namespace Lucene.Net.Index
             }
 
             BytesRef scratch = new BytesRef();
-            foreach (AtomicReaderContext context in reader.Leaves())
+            foreach (AtomicReaderContext context in reader.Leaves)
             {
-                AtomicReader r = (AtomicReader)context.Reader();
+                AtomicReader r = context.AtomicReader;
                 BinaryDocValues bdv = r.GetBinaryDocValues("val");
                 Assert.IsNotNull(bdv);
-                for (int i = 0; i < r.MaxDoc(); i++)
+                for (int i = 0; i < r.MaxDoc; i++)
                 {
                     long expected = expectedValues[i + context.DocBase];
                     long actual = GetValue(bdv, i, scratch);
@@ -246,8 +246,8 @@ namespace Lucene.Net.Index
             Assert.IsTrue(reader1 != reader2);
 
             BytesRef scratch = new BytesRef();
-            BinaryDocValues bdv1 = ((AtomicReader)reader1.Leaves()[0].Reader()).GetBinaryDocValues("val");
-            BinaryDocValues bdv2 = ((AtomicReader)reader2.Leaves()[0].Reader()).GetBinaryDocValues("val");
+            BinaryDocValues bdv1 = ((AtomicReader)reader1.Leaves[0].Reader).GetBinaryDocValues("val");
+            BinaryDocValues bdv2 = ((AtomicReader)reader2.Leaves[0].Reader).GetBinaryDocValues("val");
             Assert.AreEqual(1, GetValue(bdv1, 0, scratch));
             Assert.AreEqual(10, GetValue(bdv2, 0, scratch));
 
@@ -346,7 +346,7 @@ namespace Lucene.Net.Index
                 writer.Dispose();
             }
 
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             Assert.IsFalse(r.LiveDocs.Get(0));
             Assert.AreEqual(17, GetValue(r.GetBinaryDocValues("val"), 1, new BytesRef()));
 
@@ -386,7 +386,7 @@ namespace Lucene.Net.Index
                 writer.Dispose();
             }
 
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             Assert.IsFalse(r.LiveDocs.Get(0));
             Assert.AreEqual(1, GetValue(r.GetBinaryDocValues("val"), 0, new BytesRef())); // deletes are currently applied first
 
@@ -420,13 +420,13 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             NumericDocValues ndv = r.GetNumericDocValues("ndv");
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             SortedDocValues sdv = r.GetSortedDocValues("sdv");
             SortedSetDocValues ssdv = r.GetSortedSetDocValues("ssdv");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 Assert.AreEqual(i, ndv.Get(i));
                 Assert.AreEqual(17, GetValue(bdv, i, scratch));
@@ -472,12 +472,12 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
 
             BinaryDocValues bdv1 = r.GetBinaryDocValues("bdv1");
             BinaryDocValues bdv2 = r.GetBinaryDocValues("bdv2");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 Assert.AreEqual(17, GetValue(bdv1, i, scratch));
                 Assert.AreEqual(i, GetValue(bdv2, i, scratch));
@@ -511,10 +511,10 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 Assert.AreEqual(17, GetValue(bdv, i, scratch));
             }
@@ -545,10 +545,10 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 if (i == 0)
                 {
@@ -591,10 +591,10 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 bdv.Get(i, scratch);
                 Assert.AreEqual(0, scratch.Length);
@@ -673,7 +673,7 @@ namespace Lucene.Net.Index
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             SortedDocValues sdv = r.GetSortedDocValues("sorted");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 Assert.AreEqual(17, GetValue(bdv, i, scratch));
                 sdv.Get(i, scratch);
@@ -721,7 +721,7 @@ namespace Lucene.Net.Index
             AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 Assert.AreEqual(3, GetValue(bdv, i, scratch));
             }
@@ -795,13 +795,13 @@ namespace Lucene.Net.Index
                     reader = DirectoryReader.Open(writer, true);
                 }
 
-                Assert.AreEqual(1, reader.Leaves().Count);
-                AtomicReader r = (AtomicReader)reader.Leaves()[0].Reader();
+                Assert.AreEqual(1, reader.Leaves.Count);
+                AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
                 Assert.IsNull(r.LiveDocs, "index should have no deletes after forceMerge");
                 BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
                 Assert.IsNotNull(bdv);
                 BytesRef scratch = new BytesRef();
-                for (int i = 0; i < r.MaxDoc(); i++)
+                for (int i = 0; i < r.MaxDoc; i++)
                 {
                     Assert.AreEqual(value, GetValue(bdv, i, scratch));
                 }
@@ -836,7 +836,7 @@ namespace Lucene.Net.Index
             AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
             BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
             BytesRef scratch = new BytesRef();
-            for (int i = 0; i < r.MaxDoc(); i++)
+            for (int i = 0; i < r.MaxDoc; i++)
             {
                 Assert.AreEqual(3, GetValue(bdv, i, scratch));
             }
@@ -939,11 +939,11 @@ namespace Lucene.Net.Index
                 reader.Dispose();
                 reader = newReader;
                 //      System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
-                Assert.IsTrue(reader.NumDocs() > 0); // we delete at most one document per round
+                Assert.IsTrue(reader.NumDocs > 0); // we delete at most one document per round
                 BytesRef scratch = new BytesRef();
-                foreach (AtomicReaderContext context in reader.Leaves())
+                foreach (AtomicReaderContext context in reader.Leaves)
                 {
-                    AtomicReader r = (AtomicReader)context.Reader();
+                    AtomicReader r = context.AtomicReader;
                     //        System.out.println(((SegmentReader) r).getSegmentName());
                     Bits liveDocs = r.LiveDocs;
                     for (int field = 0; field < fieldValues.Length; field++)
@@ -952,7 +952,7 @@ namespace Lucene.Net.Index
                         BinaryDocValues bdv = r.GetBinaryDocValues(f);
                         Bits docsWithField = r.GetDocsWithField(f);
                         Assert.IsNotNull(bdv);
-                        int maxDoc = r.MaxDoc();
+                        int maxDoc = r.MaxDoc;
                         for (int doc = 0; doc < maxDoc; doc++)
                         {
                             if (liveDocs == null || liveDocs.Get(doc))
@@ -1018,9 +1018,9 @@ namespace Lucene.Net.Index
 
             DirectoryReader reader = DirectoryReader.Open(dir);
             BytesRef scratch = new BytesRef();
-            foreach (AtomicReaderContext context in reader.Leaves())
+            foreach (AtomicReaderContext context in reader.Leaves)
             {
-                AtomicReader r = (AtomicReader)context.Reader();
+                AtomicReader r = context.AtomicReader;
                 BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
                 Bits docsWithField = r.GetDocsWithField("bdv");
                 Assert.IsNotNull(docsWithField);
@@ -1066,11 +1066,11 @@ namespace Lucene.Net.Index
 
             DirectoryReader reader = DirectoryReader.Open(dir);
             BytesRef scratch = new BytesRef();
-            foreach (AtomicReaderContext context in reader.Leaves())
+            foreach (AtomicReaderContext context in reader.Leaves)
             {
-                AtomicReader r = (AtomicReader)context.Reader();
+                AtomicReader r = context.AtomicReader;
                 BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
-                for (int i = 0; i < r.MaxDoc(); i++)
+                for (int i = 0; i < r.MaxDoc; i++)
                 {
                     Assert.AreEqual(5L, GetValue(bdv, i, scratch));
                 }
@@ -1098,7 +1098,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader r = DirectoryReader.Open(dir);
-            BinaryDocValues bdv = ((AtomicReader)r.Leaves()[0].Reader()).GetBinaryDocValues("f");
+            BinaryDocValues bdv = ((AtomicReader)r.Leaves[0].Reader).GetBinaryDocValues("f");
             Assert.AreEqual(17, GetValue(bdv, 0, new BytesRef()));
             r.Dispose();
 
@@ -1207,9 +1207,9 @@ namespace Lucene.Net.Index
 
             DirectoryReader reader = DirectoryReader.Open(dir);
             BytesRef scratch = new BytesRef();
-            foreach (AtomicReaderContext context in reader.Leaves())
+            foreach (AtomicReaderContext context in reader.Leaves)
             {
-                AtomicReader r = (AtomicReader)context.Reader();
+                AtomicReader r = context.AtomicReader;
                 for (int i = 0; i < numThreads; i++)
                 {
                     BinaryDocValues bdv = r.GetBinaryDocValues("f" + i);
@@ -1217,7 +1217,7 @@ namespace Lucene.Net.Index
                     Bits docsWithBdv = r.GetDocsWithField("f" + i);
                     Bits docsWithControl = r.GetDocsWithField("cf" + i);
                     Bits liveDocs = r.LiveDocs;
-                    for (int j = 0; j < r.MaxDoc(); j++)
+                    for (int j = 0; j < r.MaxDoc; j++)
                     {
                         if (liveDocs == null || liveDocs.Get(j))
                         {
@@ -1388,12 +1388,12 @@ namespace Lucene.Net.Index
                 writer.UpdateBinaryDocValue(t, "f", ToBytes(value));
                 writer.UpdateBinaryDocValue(t, "cf", ToBytes(value * 2));
                 DirectoryReader reader = DirectoryReader.Open(writer, true);
-                foreach (AtomicReaderContext context in reader.Leaves())
+                foreach (AtomicReaderContext context in reader.Leaves)
                 {
-                    AtomicReader r = (AtomicReader)context.Reader();
+                    AtomicReader r = context.AtomicReader;
                     BinaryDocValues fbdv = r.GetBinaryDocValues("f");
                     BinaryDocValues cfbdv = r.GetBinaryDocValues("cf");
-                    for (int j = 0; j < r.MaxDoc(); j++)
+                    for (int j = 0; j < r.MaxDoc; j++)
                     {
                         Assert.AreEqual(GetValue(cfbdv, j, scratch), GetValue(fbdv, j, scratch) * 2);
                     }
@@ -1527,12 +1527,12 @@ namespace Lucene.Net.Index
 
             DirectoryReader reader_ = DirectoryReader.Open(dir2);
             BytesRef scratch = new BytesRef();
-            foreach (AtomicReaderContext context in reader_.Leaves())
+            foreach (AtomicReaderContext context in reader_.Leaves)
             {
-                AtomicReader r = (AtomicReader)context.Reader();
+                AtomicReader r = context.AtomicReader;
                 BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
                 BinaryDocValues control = r.GetBinaryDocValues("control");
-                for (int i = 0; i < r.MaxDoc(); i++)
+                for (int i = 0; i < r.MaxDoc; i++)
                 {
                     Assert.AreEqual(GetValue(bdv, i, scratch) * 2, GetValue(control, i, scratch));
                 }
@@ -1561,7 +1561,7 @@ namespace Lucene.Net.Index
 
             DirectoryReader r = DirectoryReader.Open(dir);
             BytesRef scratch = new BytesRef();
-            Assert.AreEqual(2L, GetValue(((AtomicReader)r.Leaves()[0].Reader()).GetBinaryDocValues("f"), 0, scratch));
+            Assert.AreEqual(2L, GetValue(((AtomicReader)r.Leaves[0].Reader).GetBinaryDocValues("f"), 0, scratch));
             r.Dispose();
 
             // create second gen of update files, first gen should be deleted
@@ -1570,7 +1570,7 @@ namespace Lucene.Net.Index
             Assert.AreEqual(numFiles, dir.ListAll().Length);
 
             r = DirectoryReader.Open(dir);
-            Assert.AreEqual(5L, GetValue(((AtomicReader)r.Leaves()[0].Reader()).GetBinaryDocValues("f"), 0, scratch));
+            Assert.AreEqual(5L, GetValue(((AtomicReader)r.Leaves[0].Reader).GetBinaryDocValues("f"), 0, scratch));
             r.Dispose();
 
             writer.Dispose();
@@ -1638,14 +1638,14 @@ namespace Lucene.Net.Index
 
             DirectoryReader reader = DirectoryReader.Open(dir);
             BytesRef scratch = new BytesRef();
-            foreach (AtomicReaderContext context in reader.Leaves())
+            foreach (AtomicReaderContext context in reader.Leaves)
             {
                 for (int i = 0; i < numBinaryFields; i++)
                 {
-                    AtomicReader r = (AtomicReader)context.Reader();
+                    AtomicReader r = context.AtomicReader;
                     BinaryDocValues f = r.GetBinaryDocValues("f" + i);
                     BinaryDocValues cf = r.GetBinaryDocValues("cf" + i);
-                    for (int j = 0; j < r.MaxDoc(); j++)
+                    for (int j = 0; j < r.MaxDoc; j++)
                     {
                         Assert.AreEqual(GetValue(cf, j, scratch), GetValue(f, j, scratch) * 2, "reader=" + r + ", field=f" + i + ", doc=" + j);
                     }
@@ -1678,8 +1678,8 @@ namespace Lucene.Net.Index
 
             DirectoryReader reader = DirectoryReader.Open(dir);
             BytesRef scratch = new BytesRef();
-            Assert.AreEqual(4, GetValue(((AtomicReader)reader.Leaves()[0].Reader()).GetBinaryDocValues("f1"), 0, scratch));
-            Assert.AreEqual(3, GetValue(((AtomicReader)reader.Leaves()[0].Reader()).GetBinaryDocValues("f2"), 0, scratch));
+            Assert.AreEqual(4, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f1"), 0, scratch));
+            Assert.AreEqual(3, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f2"), 0, scratch));
             reader.Dispose();
 
             dir.Dispose();
@@ -1704,8 +1704,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(1, reader.Leaves().Count);
-            Assert.AreEqual(2L, GetValue(((AtomicReader)reader.Leaves()[0].Reader()).GetBinaryDocValues("f1"), 0, new BytesRef()));
+            Assert.AreEqual(1, reader.Leaves.Count);
+            Assert.AreEqual(2L, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f1"), 0, new BytesRef()));
             reader.Dispose();
 
             dir.Dispose();
@@ -1728,8 +1728,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(1, reader.Leaves().Count);
-            Assert.AreEqual(1L, GetValue(((AtomicReader)reader.Leaves()[0].Reader()).GetBinaryDocValues("f1"), 0, new BytesRef()));
+            Assert.AreEqual(1, reader.Leaves.Count);
+            Assert.AreEqual(1L, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f1"), 0, new BytesRef()));
             reader.Dispose();
 
             dir.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestCodecHoldsOpenFiles.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestCodecHoldsOpenFiles.cs b/src/Lucene.Net.Tests/core/Index/TestCodecHoldsOpenFiles.cs
index 5e2446c..db7e926 100644
--- a/src/Lucene.Net.Tests/core/Index/TestCodecHoldsOpenFiles.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestCodecHoldsOpenFiles.cs
@@ -60,9 +60,9 @@ namespace Lucene.Net.Index
                 }
             }
 
-            foreach (AtomicReaderContext cxt in r.Leaves())
+            foreach (AtomicReaderContext cxt in r.Leaves)
             {
-                TestUtil.CheckReader(cxt.Reader());
+                TestUtil.CheckReader(cxt.Reader);
             }
 
             r.Dispose();
@@ -99,9 +99,9 @@ namespace Lucene.Net.Index
                 }
             }
 
-            foreach (AtomicReaderContext cxt in r.Leaves())
+            foreach (AtomicReaderContext cxt in r.Leaves)
             {
-                TestUtil.CheckReader(cxt.Reader());
+                TestUtil.CheckReader(cxt.Reader);
             }
 
             r.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestCodecs.cs b/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
index 6c0804e..c54c2d3 100644
--- a/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestCodecs.cs
@@ -874,9 +874,9 @@ namespace Lucene.Net.Index
 
             Term term = new Term("f", new BytesRef("doc"));
             DirectoryReader reader = DirectoryReader.Open(dir);
-            foreach (AtomicReaderContext ctx in reader.Leaves())
+            foreach (AtomicReaderContext ctx in reader.Leaves)
             {
-                DocsEnum de = ((AtomicReader)ctx.Reader()).TermDocsEnum(term);
+                DocsEnum de = ((AtomicReader)ctx.Reader).TermDocsEnum(term);
                 while (de.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
                 {
                     Assert.AreEqual(1, de.Freq(), "wrong freq for doc " + de.DocID());

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestConcurrentMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestConcurrentMergeScheduler.cs b/src/Lucene.Net.Tests/core/Index/TestConcurrentMergeScheduler.cs
index 13c7370..dc9d644 100644
--- a/src/Lucene.Net.Tests/core/Index/TestConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestConcurrentMergeScheduler.cs
@@ -159,7 +159,7 @@ namespace Lucene.Net.Index
 
             writer.Dispose();
             IndexReader reader = DirectoryReader.Open(directory);
-            Assert.AreEqual(200 + extraCount, reader.NumDocs());
+            Assert.AreEqual(200 + extraCount, reader.NumDocs);
             reader.Dispose();
             directory.Dispose();
         }
@@ -210,7 +210,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
             IndexReader reader = DirectoryReader.Open(directory);
             // Verify that we did not lose any deletes...
-            Assert.AreEqual(450, reader.NumDocs());
+            Assert.AreEqual(450, reader.NumDocs);
             reader.Dispose();
             directory.Dispose();
         }
@@ -281,7 +281,7 @@ namespace Lucene.Net.Index
                 writer.Dispose(false);
 
                 IndexReader reader = DirectoryReader.Open(directory);
-                Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
+                Assert.AreEqual((1 + iter) * 182, reader.NumDocs);
                 reader.Dispose();
 
                 // Reopen

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestCrash.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestCrash.cs b/src/Lucene.Net.Tests/core/Index/TestCrash.cs
index d6e8456..5447e03 100644
--- a/src/Lucene.Net.Tests/core/Index/TestCrash.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestCrash.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
             Crash(writer);
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.IsTrue(reader.NumDocs() < 157);
+            Assert.IsTrue(reader.NumDocs < 157);
             reader.Dispose();
 
             // Make a new dir, copying from the crashed dir, and
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.IsTrue(reader.NumDocs() < 314);
+            Assert.IsTrue(reader.NumDocs < 314);
             reader.Dispose();
 
             // Make a new dir, copying from the crashed dir, and
@@ -146,7 +146,7 @@ namespace Lucene.Net.Index
 
             writer.Dispose();
             writer = InitIndex(Random(), dir, false);
-            Assert.AreEqual(314, writer.MaxDoc());
+            Assert.AreEqual(314, writer.MaxDoc);
             Crash(writer);
 
             /*
@@ -159,7 +159,7 @@ namespace Lucene.Net.Index
             */
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.IsTrue(reader.NumDocs() >= 157);
+            Assert.IsTrue(reader.NumDocs >= 157);
             reader.Dispose();
 
             // Make a new dir, copying from the crashed dir, and
@@ -189,7 +189,7 @@ namespace Lucene.Net.Index
             */
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(157, reader.NumDocs());
+            Assert.AreEqual(157, reader.NumDocs);
             reader.Dispose();
             dir.Dispose();
         }
@@ -211,7 +211,7 @@ namespace Lucene.Net.Index
               System.out.println("file " + i + " = " + l[i] + " " + dir.FileLength(l[i]) + " bytes");
             */
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(157, reader.NumDocs());
+            Assert.AreEqual(157, reader.NumDocs);
             reader.Dispose();
             dir.Dispose();
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestCustomNorms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestCustomNorms.cs b/src/Lucene.Net.Tests/core/Index/TestCustomNorms.cs
index a723dbb..9cae1e4 100644
--- a/src/Lucene.Net.Tests/core/Index/TestCustomNorms.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestCustomNorms.cs
@@ -76,7 +76,7 @@ namespace Lucene.Net.Index
             AtomicReader open = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
             NumericDocValues norms = open.GetNormValues(FloatTestField);
             Assert.IsNotNull(norms);
-            for (int i = 0; i < open.MaxDoc(); i++)
+            for (int i = 0; i < open.MaxDoc; i++)
             {
                 Document document = open.Document(i);
                 float expected = Convert.ToSingle(document.Get(FloatTestField));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDeletionPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDeletionPolicy.cs b/src/Lucene.Net.Tests/core/Index/TestDeletionPolicy.cs
index 9953a99..04ec9c4 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDeletionPolicy.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDeletionPolicy.cs
@@ -89,7 +89,7 @@ namespace Lucene.Net.Index
             {
                 IndexCommit lastCommit = commits[commits.Count - 1];
                 DirectoryReader r = DirectoryReader.Open(Dir);
-                Assert.AreEqual(r.Leaves().Count, lastCommit.SegmentCount, "lastCommit.segmentCount()=" + lastCommit.SegmentCount + " vs IndexReader.segmentCount=" + r.Leaves().Count);
+                Assert.AreEqual(r.Leaves.Count, lastCommit.SegmentCount, "lastCommit.segmentCount()=" + lastCommit.SegmentCount + " vs IndexReader.segmentCount=" + r.Leaves.Count);
                 r.Dispose();
                 OuterInstance.VerifyCommitOrder(commits);
                 NumOnCommit++;
@@ -377,7 +377,7 @@ namespace Lucene.Net.Index
                 bool needsMerging;
                 {
                     DirectoryReader r = DirectoryReader.Open(dir);
-                    needsMerging = r.Leaves().Count != 1;
+                    needsMerging = r.Leaves.Count != 1;
                     r.Dispose();
                 }
                 if (needsMerging)
@@ -493,8 +493,8 @@ namespace Lucene.Net.Index
 
             DirectoryReader r = DirectoryReader.Open(dir);
             // Still merged, still 11 docs
-            Assert.AreEqual(1, r.Leaves().Count);
-            Assert.AreEqual(11, r.NumDocs());
+            Assert.AreEqual(1, r.Leaves.Count);
+            Assert.AreEqual(11, r.NumDocs);
             r.Dispose();
 
             writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(policy).SetIndexCommit(lastCommit));
@@ -508,8 +508,8 @@ namespace Lucene.Net.Index
             r = DirectoryReader.Open(dir);
             // Not fully merged because we rolled it back, and now only
             // 10 docs
-            Assert.IsTrue(r.Leaves().Count > 1);
-            Assert.AreEqual(10, r.NumDocs());
+            Assert.IsTrue(r.Leaves.Count > 1);
+            Assert.AreEqual(10, r.NumDocs);
             r.Dispose();
 
             // Re-merge
@@ -518,8 +518,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             r = DirectoryReader.Open(dir);
-            Assert.AreEqual(1, r.Leaves().Count);
-            Assert.AreEqual(10, r.NumDocs());
+            Assert.AreEqual(1, r.Leaves.Count);
+            Assert.AreEqual(10, r.NumDocs);
             r.Dispose();
 
             // Now open writer on the commit just before merging,
@@ -530,16 +530,16 @@ namespace Lucene.Net.Index
             // Reader still sees fully merged index, because writer
             // opened on the prior commit has not yet committed:
             r = DirectoryReader.Open(dir);
-            Assert.AreEqual(1, r.Leaves().Count);
-            Assert.AreEqual(10, r.NumDocs());
+            Assert.AreEqual(1, r.Leaves.Count);
+            Assert.AreEqual(10, r.NumDocs);
             r.Dispose();
 
             writer.Dispose();
 
             // Now reader sees not-fully-merged index:
             r = DirectoryReader.Open(dir);
-            Assert.IsTrue(r.Leaves().Count > 1);
-            Assert.AreEqual(10, r.NumDocs());
+            Assert.IsTrue(r.Leaves.Count > 1);
+            Assert.AreEqual(10, r.NumDocs);
             r.Dispose();
 
             dir.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDirectoryReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDirectoryReader.cs b/src/Lucene.Net.Tests/core/Index/TestDirectoryReader.cs
index 6a0a2c0..d3c3a9d 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDirectoryReader.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDirectoryReader.cs
@@ -384,7 +384,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Dispose();
             DirectoryReader reader = DirectoryReader.Open(dir);
-            Document doc2 = reader.Document(reader.MaxDoc() - 1);
+            Document doc2 = reader.Document(reader.MaxDoc - 1);
             IndexableField[] fields = doc2.GetFields("bin1");
             Assert.IsNotNull(fields);
             Assert.AreEqual(1, fields.Length);
@@ -403,7 +403,7 @@ namespace Lucene.Net.Index
             writer.ForceMerge(1);
             writer.Dispose();
             reader = DirectoryReader.Open(dir);
-            doc2 = reader.Document(reader.MaxDoc() - 1);
+            doc2 = reader.Document(reader.MaxDoc - 1);
             fields = doc2.GetFields("bin1");
             Assert.IsNotNull(fields);
             Assert.AreEqual(1, fields.Length);
@@ -557,10 +557,10 @@ namespace Lucene.Net.Index
         // TODO: maybe this can reuse the logic of test dueling codecs?
         public static void AssertIndexEquals(DirectoryReader index1, DirectoryReader index2)
         {
-            Assert.AreEqual(index1.NumDocs(), index2.NumDocs(), "IndexReaders have different values for numDocs.");
-            Assert.AreEqual(index1.MaxDoc(), index2.MaxDoc(), "IndexReaders have different values for maxDoc.");
-            Assert.AreEqual(index1.HasDeletions(), index2.HasDeletions(), "Only one IndexReader has deletions.");
-            Assert.AreEqual(index1.Leaves().Count == 1, index2.Leaves().Count == 1, "Single segment test differs.");
+            Assert.AreEqual(index1.NumDocs, index2.NumDocs, "IndexReaders have different values for numDocs.");
+            Assert.AreEqual(index1.MaxDoc, index2.MaxDoc, "IndexReaders have different values for maxDoc.");
+            Assert.AreEqual(index1.HasDeletions, index2.HasDeletions, "Only one IndexReader has deletions.");
+            Assert.AreEqual(index1.Leaves.Count == 1, index2.Leaves.Count == 1, "Single segment test differs.");
 
             // check field names
             FieldInfos fieldInfos1 = MultiFields.GetMergedFieldInfos(index1);
@@ -583,7 +583,7 @@ namespace Lucene.Net.Index
                 if (norms1 != null && norms2 != null)
                 {
                     // todo: generalize this (like TestDuelingCodecs assert)
-                    for (int i = 0; i < index1.MaxDoc(); i++)
+                    for (int i = 0; i < index1.MaxDoc; i++)
                     {
                         Assert.AreEqual(norms1.Get(i), norms2.Get(i), "Norm different for doc " + i + " and field '" + curField + "'.");
                     }
@@ -598,13 +598,13 @@ namespace Lucene.Net.Index
             // check deletions
             Bits liveDocs1 = MultiFields.GetLiveDocs(index1);
             Bits liveDocs2 = MultiFields.GetLiveDocs(index2);
-            for (int i = 0; i < index1.MaxDoc(); i++)
+            for (int i = 0; i < index1.MaxDoc; i++)
             {
                 Assert.AreEqual(liveDocs1 == null || !liveDocs1.Get(i), liveDocs2 == null || !liveDocs2.Get(i), "Doc " + i + " only deleted in one index.");
             }
 
             // check stored fields
-            for (int i = 0; i < index1.MaxDoc(); i++)
+            for (int i = 0; i < index1.MaxDoc; i++)
             {
                 if (liveDocs1 == null || liveDocs1.Get(i))
                 {
@@ -804,7 +804,7 @@ namespace Lucene.Net.Index
             DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
             Assert.IsNotNull(r2);
             r.Dispose();
-            AtomicReader sub0 = (AtomicReader)r2.Leaves()[0].Reader();
+            AtomicReader sub0 = (AtomicReader)r2.Leaves[0].Reader;
             FieldCache_Fields.Ints ints2 = FieldCache_Fields.DEFAULT.GetInts(sub0, "number", false);
             r2.Dispose();
             Assert.IsTrue(ints == ints2);
@@ -828,16 +828,16 @@ namespace Lucene.Net.Index
 
             DirectoryReader r = DirectoryReader.Open(dir);
             AtomicReader r1 = GetOnlySegmentReader(r);
-            Assert.AreEqual(36, r1.Fields().UniqueTermCount);
+            Assert.AreEqual(36, r1.Fields.UniqueTermCount);
             writer.AddDocument(doc);
             writer.Commit();
             DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
             Assert.IsNotNull(r2);
             r.Dispose();
 
-            foreach (AtomicReaderContext s in r2.Leaves())
+            foreach (AtomicReaderContext s in r2.Leaves)
             {
-                Assert.AreEqual(36, ((AtomicReader)s.Reader()).Fields().UniqueTermCount);
+                Assert.AreEqual(36, ((AtomicReader)s.Reader).Fields.UniqueTermCount);
             }
             r2.Dispose();
             writer.Dispose();
@@ -868,7 +868,7 @@ namespace Lucene.Net.Index
                 // expected
             }
 
-            Assert.AreEqual(-1, ((SegmentReader)r.Leaves()[0].Reader()).TermInfosIndexDivisor);
+            Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
             writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())).SetMergePolicy(NewLogMergePolicy(10)));
             writer.AddDocument(doc);
             writer.Dispose();
@@ -878,13 +878,13 @@ namespace Lucene.Net.Index
             Assert.IsNotNull(r2);
             Assert.IsNull(DirectoryReader.OpenIfChanged(r2));
             r.Dispose();
-            IList<AtomicReaderContext> leaves = r2.Leaves();
+            IList<AtomicReaderContext> leaves = r2.Leaves;
             Assert.AreEqual(2, leaves.Count);
             foreach (AtomicReaderContext ctx in leaves)
             {
                 try
                 {
-                    ctx.Reader().DocFreq(new Term("field", "f"));
+                    ctx.Reader.DocFreq(new Term("field", "f"));
                     Assert.Fail("did not hit expected exception");
                 }
                 catch (InvalidOperationException ise)
@@ -1190,7 +1190,7 @@ namespace Lucene.Net.Index
                 {
                     while (ToInc.TryIncRef())
                     {
-                        Assert.IsFalse(ToInc.HasDeletions());
+                        Assert.IsFalse(ToInc.HasDeletions);
                         ToInc.DecRef();
                     }
                     Assert.IsFalse(ToInc.TryIncRef());
@@ -1265,7 +1265,7 @@ namespace Lucene.Net.Index
             {
                 // expected
             }
-            Assert.AreEqual(-1, ((SegmentReader)r.Leaves()[0].Reader()).TermInfosIndexDivisor);
+            Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
             r.Dispose();
 
             // open(IndexCommit, int)
@@ -1279,7 +1279,7 @@ namespace Lucene.Net.Index
             {
                 // expected
             }
-            Assert.AreEqual(-1, ((SegmentReader)r.Leaves()[0].Reader()).TermInfosIndexDivisor);
+            Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
             r.Dispose();
             dir.Dispose();
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDirectoryReaderReopen.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDirectoryReaderReopen.cs b/src/Lucene.Net.Tests/core/Index/TestDirectoryReaderReopen.cs
index d9eca04..524047b 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDirectoryReaderReopen.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDirectoryReaderReopen.cs
@@ -401,7 +401,7 @@ namespace Lucene.Net.Index
                         }
 
                         IndexSearcher searcher = NewSearcher(refreshed);
-                        ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("field1", "a" + rnd.Next(refreshed.MaxDoc()))), null, 1000).ScoreDocs;
+                        ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("field1", "a" + rnd.Next(refreshed.MaxDoc))), null, 1000).ScoreDocs;
                         if (hits.Length > 0)
                         {
                             searcher.Doc(hits[0].Doc);
@@ -583,11 +583,11 @@ namespace Lucene.Net.Index
             DirectoryReader r = DirectoryReader.Open(dir);
             if (multiSegment)
             {
-                Assert.IsTrue(r.Leaves().Count > 1);
+                Assert.IsTrue(r.Leaves.Count > 1);
             }
             else
             {
-                Assert.IsTrue(r.Leaves().Count == 1);
+                Assert.IsTrue(r.Leaves.Count == 1);
             }
             r.Dispose();
         }
@@ -719,7 +719,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             DirectoryReader r = DirectoryReader.Open(dir);
-            Assert.AreEqual(0, r.NumDocs());
+            Assert.AreEqual(0, r.NumDocs);
 
             ICollection<IndexCommit> commits = DirectoryReader.ListCommits(dir);
             foreach (IndexCommit commit in commits)
@@ -741,11 +741,11 @@ namespace Lucene.Net.Index
                 }
                 if (v < 4)
                 {
-                    Assert.AreEqual(1 + v, r2.NumDocs());
+                    Assert.AreEqual(1 + v, r2.NumDocs);
                 }
                 else
                 {
-                    Assert.AreEqual(7 - v, r2.NumDocs());
+                    Assert.AreEqual(7 - v, r2.NumDocs);
                 }
                 r.Dispose();
                 r = r2;
@@ -770,11 +770,11 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             DirectoryReader r = DirectoryReader.Open(w, true);
 
-            Assert.AreEqual(2, r.NumDocs());
+            Assert.AreEqual(2, r.NumDocs);
             IndexReader r2 = DirectoryReader.OpenIfChanged(r, commits[0]);
             Assert.IsNotNull(r2);
             r.Dispose();
-            Assert.AreEqual(1, r2.NumDocs());
+            Assert.AreEqual(1, r2.NumDocs);
             w.Dispose();
             r2.Dispose();
             dir.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDoc.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDoc.cs b/src/Lucene.Net.Tests/core/Index/TestDoc.cs
index a61831d..c4ea03b 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDoc.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDoc.cs
@@ -240,12 +240,12 @@ namespace Lucene.Net.Index
         {
             SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
 
-            for (int i = 0; i < reader.NumDocs(); i++)
+            for (int i = 0; i < reader.NumDocs; i++)
             {
                 @out.WriteLine(reader.Document(i));
             }
 
-            Fields fields = reader.Fields();
+            Fields fields = reader.Fields;
             foreach (string field in fields)
             {
                 Terms terms = fields.Terms(field);
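
In the hunk above, the reader-side Fields accessor becomes a property while Terms(field), Iterator(null) and Next() stay method calls, exactly as the surrounding test code shows. A small sketch of term enumeration in the converted style; the TermDumper/PrintAllTerms names are made up for illustration:

    using System;
    using Lucene.Net.Index;
    using Lucene.Net.Util;

    internal static class TermDumper
    {
        // Enumerates every term of every indexed field via the new Fields property.
        public static void PrintAllTerms(AtomicReader reader)
        {
            Fields fields = reader.Fields;        // was reader.Fields()
            if (fields == null)
            {
                return;                           // segment has no indexed fields
            }
            foreach (string field in fields)
            {
                Terms terms = fields.Terms(field);
                TermsEnum te = terms.Iterator(null);
                BytesRef term;
                while ((term = te.Next()) != null)
                {
                    Console.WriteLine(field + ": " + term);
                }
            }
        }
    }
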

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDocCount.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDocCount.cs b/src/Lucene.Net.Tests/core/Index/TestDocCount.cs
index dc2ecdb..113088c 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDocCount.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDocCount.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Index
                     continue;
                 }
                 int docCount = terms.DocCount;
-                FixedBitSet visited = new FixedBitSet(ir.MaxDoc());
+                FixedBitSet visited = new FixedBitSet(ir.MaxDoc);
                 TermsEnum te = terms.Iterator(null);
                 while (te.Next() != null)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDocTermOrds.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDocTermOrds.cs b/src/Lucene.Net.Tests/core/Index/TestDocTermOrds.cs
index d819e42..c985e67 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDocTermOrds.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDocTermOrds.cs
@@ -170,13 +170,13 @@ namespace Lucene.Net.Index
                 Console.WriteLine("TEST: reader=" + r);
             }
 
-            foreach (AtomicReaderContext ctx in r.Leaves())
+            foreach (AtomicReaderContext ctx in r.Leaves)
             {
                 if (VERBOSE)
                 {
-                    Console.WriteLine("\nTEST: sub=" + ctx.Reader());
+                    Console.WriteLine("\nTEST: sub=" + ctx.Reader);
                 }
-                Verify((AtomicReader)ctx.Reader(), idToOrds, termsArray, null);
+                Verify((AtomicReader)ctx.Reader, idToOrds, termsArray, null);
             }
 
             // Also test top-level reader: its enum does not support
@@ -309,13 +309,13 @@ namespace Lucene.Net.Index
                     idToOrdsPrefix[id] = newOrdsArray;
                 }
 
-                foreach (AtomicReaderContext ctx in r.Leaves())
+                foreach (AtomicReaderContext ctx in r.Leaves)
                 {
                     if (VERBOSE)
                     {
-                        Console.WriteLine("\nTEST: sub=" + ctx.Reader());
+                        Console.WriteLine("\nTEST: sub=" + ctx.Reader);
                     }
-                    Verify((AtomicReader)ctx.Reader(), idToOrdsPrefix, termsArray, prefixRef);
+                    Verify((AtomicReader)ctx.Reader, idToOrdsPrefix, termsArray, prefixRef);
                 }
 
                 // Also test top-level reader: its enum does not support
@@ -339,7 +339,7 @@ namespace Lucene.Net.Index
 
             FieldCache_Fields.Ints docIDToID = FieldCache_Fields.DEFAULT.GetInts(r, "id", false);
             /*
-              for(int docID=0;docID<subR.MaxDoc();docID++) {
+              for(int docID=0;docID<subR.MaxDoc;docID++) {
               System.out.println("  docID=" + docID + " id=" + docIDToID[docID]);
               }
             */
@@ -356,7 +356,7 @@ namespace Lucene.Net.Index
                 }
             }
 
-            //final TermsEnum te = subR.Fields().Terms("field").iterator();
+            //final TermsEnum te = subR.Fields.Terms("field").iterator();
             TermsEnum te = dto.GetOrdTermsEnum(r);
             if (dto.NumTerms() == 0)
             {
@@ -403,11 +403,11 @@ namespace Lucene.Net.Index
             }
 
             SortedSetDocValues iter = dto.GetIterator(r);
-            for (int docID = 0; docID < r.MaxDoc(); docID++)
+            for (int docID = 0; docID < r.MaxDoc; docID++)
             {
                 if (VERBOSE)
                 {
-                    Console.WriteLine("TEST: docID=" + docID + " of " + r.MaxDoc() + " (id=" + docIDToID.Get(docID) + ")");
+                    Console.WriteLine("TEST: docID=" + docID + " of " + r.MaxDoc + " (id=" + docIDToID.Get(docID) + ")");
                 }
                 iter.Document = docID;
                 int[] answers = idToOrds[docIDToID.Get(docID)];

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDocValuesIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDocValuesIndexing.cs b/src/Lucene.Net.Tests/core/Index/TestDocValuesIndexing.cs
index 82544b7..713f64c 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDocValuesIndexing.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDocValuesIndexing.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
             DirectoryReader r3 = w.Reader;
             w.Dispose();
             AtomicReader sr = GetOnlySegmentReader(r3);
-            Assert.AreEqual(2, sr.NumDocs());
+            Assert.AreEqual(2, sr.NumDocs);
             NumericDocValues docValues = sr.GetNumericDocValues("dv");
             Assert.IsNotNull(docValues);
             r3.Dispose();
@@ -895,8 +895,8 @@ namespace Lucene.Net.Index
             DirectoryReader r = writer.Reader;
             writer.Dispose();
 
-            AtomicReader subR = (AtomicReader)r.Leaves()[0].Reader();
-            Assert.AreEqual(2, subR.NumDocs());
+            AtomicReader subR = (AtomicReader)r.Leaves[0].Reader;
+            Assert.AreEqual(2, subR.NumDocs);
 
             Bits bits = FieldCache_Fields.DEFAULT.GetDocsWithField(subR, "dv");
             Assert.IsTrue(bits.Get(0));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDocValuesWithThreads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDocValuesWithThreads.cs b/src/Lucene.Net.Tests/core/Index/TestDocValuesWithThreads.cs
index fe9cb21..f043947 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDocValuesWithThreads.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDocValuesWithThreads.cs
@@ -70,8 +70,8 @@ namespace Lucene.Net.Index
             IndexReader r = w.Reader;
             w.Dispose();
 
-            Assert.AreEqual(1, r.Leaves().Count);
-            AtomicReader ar = (AtomicReader)r.Leaves()[0].Reader();
+            Assert.AreEqual(1, r.Leaves.Count);
+            AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;
 
             int numThreads = TestUtil.NextInt(Random(), 2, 5);
             IList<ThreadClass> threads = new List<ThreadClass>();
@@ -299,7 +299,7 @@ namespace Lucene.Net.Index
 
                     for (int iter = 0; iter < 100; iter++)
                     {
-                        int docID = random.Next(Sr.MaxDoc());
+                        int docID = random.Next(Sr.MaxDoc);
                         source.Get(docID, scratch);
                         Assert.AreEqual(DocValues[(int)docIDToID.Get(docID)], scratch);
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestDocsAndPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestDocsAndPositions.cs b/src/Lucene.Net.Tests/core/Index/TestDocsAndPositions.cs
index f3ddf89..a9873a3 100644
--- a/src/Lucene.Net.Tests/core/Index/TestDocsAndPositions.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestDocsAndPositions.cs
@@ -73,15 +73,15 @@ namespace Lucene.Net.Index
             {
                 BytesRef bytes = new BytesRef("1");
                 IndexReaderContext topReaderContext = reader.Context;
-                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves())
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
                 {
-                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader(), bytes, null);
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
                     Assert.IsNotNull(docsAndPosEnum);
-                    if (atomicReaderContext.Reader().MaxDoc() == 0)
+                    if (atomicReaderContext.Reader.MaxDoc == 0)
                     {
                         continue;
                     }
-                    int advance = docsAndPosEnum.Advance(Random().Next(atomicReaderContext.Reader().MaxDoc()));
+                    int advance = docsAndPosEnum.Advance(Random().Next(atomicReaderContext.Reader.MaxDoc));
                     do
                     {
                         string msg = "Advanced to: " + advance + " current doc: " + docsAndPosEnum.DocID(); // TODO: + " usePayloads: " + usePayload;
@@ -164,12 +164,12 @@ namespace Lucene.Net.Index
             {
                 BytesRef bytes = new BytesRef("" + term);
                 IndexReaderContext topReaderContext = reader.Context;
-                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves())
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
                 {
-                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader(), bytes, null);
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
                     Assert.IsNotNull(docsAndPosEnum);
                     int initDoc = 0;
-                    int maxDoc = atomicReaderContext.Reader().MaxDoc();
+                    int maxDoc = atomicReaderContext.Reader.MaxDoc;
                     // initially advance or do next doc
                     if (Random().NextBoolean())
                     {
@@ -248,10 +248,10 @@ namespace Lucene.Net.Index
             {
                 BytesRef bytes = new BytesRef("" + term);
                 IndexReaderContext topReaderContext = reader.Context;
-                foreach (AtomicReaderContext context in topReaderContext.Leaves())
+                foreach (AtomicReaderContext context in topReaderContext.Leaves)
                 {
-                    int maxDoc = context.Reader().MaxDoc();
-                    DocsEnum docsEnum = TestUtil.Docs(Random(), context.Reader(), FieldName, bytes, null, null, DocsEnum.FLAG_FREQS);
+                    int maxDoc = context.AtomicReader.MaxDoc;
+                    DocsEnum docsEnum = TestUtil.Docs(Random(), context.Reader, FieldName, bytes, null, null, DocsEnum.FLAG_FREQS);
                     if (FindNext(freqInDoc, context.DocBase, context.DocBase + maxDoc) == int.MaxValue)
                     {
                         Assert.IsNull(docsEnum);
@@ -345,13 +345,13 @@ namespace Lucene.Net.Index
                 BytesRef bytes = new BytesRef("even");
 
                 IndexReaderContext topReaderContext = reader.Context;
-                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves())
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
                 {
-                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader(), bytes, null);
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
                     Assert.IsNotNull(docsAndPosEnum);
 
                     int initDoc = 0;
-                    int maxDoc = atomicReaderContext.Reader().MaxDoc();
+                    int maxDoc = atomicReaderContext.Reader.MaxDoc;
                     // initially advance or do next doc
                     if (Random().NextBoolean())
                     {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestFilterAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestFilterAtomicReader.cs b/src/Lucene.Net.Tests/core/Index/TestFilterAtomicReader.cs
index 37f6732..9c36d82 100644
--- a/src/Lucene.Net.Tests/core/Index/TestFilterAtomicReader.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestFilterAtomicReader.cs
@@ -124,9 +124,9 @@ namespace Lucene.Net.Index
             {
             }
 
-            public override Fields Fields()
+            public override Fields Fields
             {
-                return new TestFields(base.Fields());
+                get { return new TestFields(base.Fields); }
             }
         }
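
The TestFilterAtomicReader hunk is the declaration-side half of the same rule: an overridden Java getter becomes a read-only C# property. A stripped-down sketch of that shape, with a hypothetical LoggingAtomicReader wrapper standing in for the test's inner types and assuming the base constructor takes the wrapped AtomicReader:

    using Lucene.Net.Index;

    // Hypothetical wrapper; only the Fields override matters here.
    internal class LoggingAtomicReader : FilterAtomicReader
    {
        public LoggingAtomicReader(AtomicReader input)
            : base(input)
        {
        }

        // Before: public override Fields Fields() { return base.Fields(); }
        // After:  a read-only property delegating to the wrapped reader.
        public override Fields Fields
        {
            get { return base.Fields; }
        }
    }
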
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestFlex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestFlex.cs b/src/Lucene.Net.Tests/core/Index/TestFlex.cs
index 8a5347c..d84f581 100644
--- a/src/Lucene.Net.Tests/core/Index/TestFlex.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestFlex.cs
@@ -80,7 +80,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.ForceMerge(1);
             DirectoryReader r = w.Reader;
-            TermsEnum terms = GetOnlySegmentReader(r).Fields().Terms("f").Iterator(null);
+            TermsEnum terms = GetOnlySegmentReader(r).Fields.Terms("f").Iterator(null);
             Assert.IsTrue(terms.Next() != null);
             try
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestIndexReaderClose.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestIndexReaderClose.cs b/src/Lucene.Net.Tests/core/Index/TestIndexReaderClose.cs
index 3a3a141..8c785bd 100644
--- a/src/Lucene.Net.Tests/core/Index/TestIndexReaderClose.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestIndexReaderClose.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
 
                 try
                 {
-                    reader.Fields();
+                    var aaa = reader.Fields;
                     Assert.Fail("we are closed");
                 }
                 catch (AlreadyClosedException ex)
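
Because Fields is now a property rather than a method, the closed-reader test above can no longer simply "call" it; the value has to be read into a local (the throwaway var aaa) so the getter actually runs and hits the closed check. A condensed sketch of that assertion pattern as a hypothetical helper, in the same NUnit style the tests use:

    // Hypothetical helper: verifies a disposed reader refuses field access.
    private static void AssertReaderIsClosed(AtomicReader reader)
    {
        try
        {
            var unused = reader.Fields;    // getter must execute to throw
            Assert.Fail("expected AlreadyClosedException");
        }
        catch (AlreadyClosedException)
        {
            // expected: the reader has already been disposed
        }
    }
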

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e2f44e8e/src/Lucene.Net.Tests/core/Index/TestIndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/core/Index/TestIndexWriter.cs
index e489065..afa0b1a 100644
--- a/src/Lucene.Net.Tests/core/Index/TestIndexWriter.cs
+++ b/src/Lucene.Net.Tests/core/Index/TestIndexWriter.cs
@@ -107,7 +107,7 @@ namespace Lucene.Net.Index
             {
                 AddDocWithIndex(writer, i);
             }
-            Assert.AreEqual(100, writer.MaxDoc());
+            Assert.AreEqual(100, writer.MaxDoc);
             writer.Dispose();
 
             // delete 40 documents
@@ -119,27 +119,27 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(60, reader.NumDocs());
+            Assert.AreEqual(60, reader.NumDocs);
             reader.Dispose();
 
             // merge the index down and check that the new doc count is correct
             writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
             Assert.AreEqual(60, writer.NumDocs());
             writer.ForceMerge(1);
-            Assert.AreEqual(60, writer.MaxDoc());
+            Assert.AreEqual(60, writer.MaxDoc);
             Assert.AreEqual(60, writer.NumDocs());
             writer.Dispose();
 
             // check that the index reader gives the same numbers.
             reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(60, reader.MaxDoc());
-            Assert.AreEqual(60, reader.NumDocs());
+            Assert.AreEqual(60, reader.MaxDoc);
+            Assert.AreEqual(60, reader.NumDocs);
             reader.Dispose();
 
             // make sure opening a new index for create over
             // this existing one works correctly:
             writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE));
-            Assert.AreEqual(0, writer.MaxDoc());
+            Assert.AreEqual(0, writer.MaxDoc);
             Assert.AreEqual(0, writer.NumDocs());
             writer.Dispose();
             dir.Dispose();
@@ -204,17 +204,17 @@ namespace Lucene.Net.Index
 
             // now open reader:
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
+            Assert.AreEqual(reader.NumDocs, 1, "should be one document");
 
             // now open index for create:
             writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE));
-            Assert.AreEqual(writer.MaxDoc(), 0, "should be zero documents");
+            Assert.AreEqual(writer.MaxDoc, 0, "should be zero documents");
             AddDoc(writer);
             writer.Dispose();
 
-            Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
+            Assert.AreEqual(reader.NumDocs, 1, "should be one document");
             IndexReader reader2 = DirectoryReader.Open(dir);
-            Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
+            Assert.AreEqual(reader2.NumDocs, 1, "should be one document");
             reader.Dispose();
             reader2.Dispose();
 
@@ -254,8 +254,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(0, reader.MaxDoc());
-            Assert.AreEqual(0, reader.NumDocs());
+            Assert.AreEqual(0, reader.MaxDoc);
+            Assert.AreEqual(0, reader.NumDocs);
             reader.Dispose();
 
             writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND));
@@ -263,8 +263,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(0, reader.MaxDoc());
-            Assert.AreEqual(0, reader.NumDocs());
+            Assert.AreEqual(0, reader.MaxDoc);
+            Assert.AreEqual(0, reader.NumDocs);
             reader.Dispose();
             dir.Dispose();
         }
@@ -288,8 +288,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(100, reader.MaxDoc());
-            Assert.AreEqual(100, reader.NumDocs());
+            Assert.AreEqual(100, reader.MaxDoc);
+            Assert.AreEqual(100, reader.NumDocs);
             for (int j = 0; j < 100; j++)
             {
                 Assert.AreEqual(1, reader.DocFreq(new Term("a" + j, "aaa" + j)));
@@ -635,8 +635,8 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(1, reader.MaxDoc());
-            Assert.AreEqual(1, reader.NumDocs());
+            Assert.AreEqual(1, reader.MaxDoc);
+            Assert.AreEqual(1, reader.NumDocs);
             Term t = new Term("field", "a");
             Assert.AreEqual(1, reader.DocFreq(t));
             DocsEnum td = TestUtil.Docs(Random(), reader, "field", new BytesRef("a"), MultiFields.GetLiveDocs(reader), null, DocsEnum.FLAG_FREQS);
@@ -738,7 +738,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(new Document());
             writer.Dispose();
             IndexReader reader = DirectoryReader.Open(dir);
-            Assert.AreEqual(2, reader.NumDocs());
+            Assert.AreEqual(2, reader.NumDocs);
             reader.Dispose();
             dir.Dispose();
         }
@@ -901,7 +901,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
             DirectoryReader reader = DirectoryReader.Open(dir);
             AtomicReader subreader = GetOnlySegmentReader(reader);
-            TermsEnum te = subreader.Fields().Terms("").Iterator(null);
+            TermsEnum te = subreader.Fields.Terms("").Iterator(null);
             Assert.AreEqual(new BytesRef("a"), te.Next());
             Assert.AreEqual(new BytesRef("b"), te.Next());
             Assert.AreEqual(new BytesRef("c"), te.Next());
@@ -924,7 +924,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
             DirectoryReader reader = DirectoryReader.Open(dir);
             AtomicReader subreader = GetOnlySegmentReader(reader);
-            TermsEnum te = subreader.Fields().Terms("").Iterator(null);
+            TermsEnum te = subreader.Fields.Terms("").Iterator(null);
             Assert.AreEqual(new BytesRef(""), te.Next());
             Assert.AreEqual(new BytesRef("a"), te.Next());
             Assert.AreEqual(new BytesRef("b"), te.Next());
@@ -977,7 +977,7 @@ namespace Lucene.Net.Index
             w.Dispose();
 
             IndexReader ir = DirectoryReader.Open(dir);
-            Assert.AreEqual(0, ir.NumDocs());
+            Assert.AreEqual(0, ir.NumDocs);
             ir.Dispose();
 
             dir.Dispose();
@@ -1109,7 +1109,7 @@ namespace Lucene.Net.Index
             writer.Dispose();
 
             IndexReader r3 = DirectoryReader.Open(dir);
-            Assert.AreEqual(5, r3.NumDocs());
+            Assert.AreEqual(5, r3.NumDocs);
             r3.Dispose();
 
             r1.Dispose();
@@ -1255,9 +1255,9 @@ namespace Lucene.Net.Index
                                     try
                                     {
                                         r = DirectoryReader.Open(w, Random.NextBoolean());
-                                        if (Random.NextBoolean() && r.MaxDoc() > 0)
+                                        if (Random.NextBoolean() && r.MaxDoc > 0)
                                         {
-                                            int docid = Random.Next(r.MaxDoc());
+                                            int docid = Random.Next(r.MaxDoc);
                                             w.TryDeleteDocument(r, docid);
                                         }
                                     }
@@ -1348,7 +1348,7 @@ namespace Lucene.Net.Index
                     try
                     {
                         IndexReader r = DirectoryReader.Open(dir);
-                        //System.out.println("doc count=" + r.NumDocs());
+                        //System.out.println("doc count=" + r.NumDocs);
                         r.Dispose();
                     }
                     catch (Exception e)
@@ -1561,7 +1561,7 @@ namespace Lucene.Net.Index
             w.AddDocument(d);
 
             AtomicReader r = GetOnlySegmentReader(w.Reader);
-            TermsEnum t = r.Fields().Terms("field").Iterator(null);
+            TermsEnum t = r.Fields.Terms("field").Iterator(null);
             int count = 0;
             while (t.Next() != null)
             {
@@ -1854,9 +1854,9 @@ namespace Lucene.Net.Index
 
             AssertNoUnreferencedFiles(dir, "no tv files");
             DirectoryReader r0 = DirectoryReader.Open(dir);
-            foreach (AtomicReaderContext ctx in r0.Leaves())
+            foreach (AtomicReaderContext ctx in r0.Leaves)
             {
-                SegmentReader sr = (SegmentReader)ctx.Reader();
+                SegmentReader sr = (SegmentReader)ctx.Reader;
                 Assert.IsFalse(sr.FieldInfos.HasVectors());
             }
 
@@ -1969,7 +1969,7 @@ namespace Lucene.Net.Index
 
             // Make sure the doc that has the massive term is NOT in
             // the index:
-            Assert.AreEqual(1, reader.NumDocs(), "document with wicked long term is in the index!");
+            Assert.AreEqual(1, reader.NumDocs, "document with wicked long term is in the index!");
 
             reader.Dispose();
             dir.Dispose();
@@ -2159,7 +2159,7 @@ namespace Lucene.Net.Index
             w.Commit();
             w.Dispose();
             IndexReader r = DirectoryReader.Open(dir);
-            Assert.AreEqual(0, r.MaxDoc());
+            Assert.AreEqual(0, r.MaxDoc);
             r.Dispose();
             dir.Dispose();
         }
@@ -2190,7 +2190,7 @@ namespace Lucene.Net.Index
             w.Rollback();
             Assert.IsTrue(DirectoryReader.IndexExists(dir));
             IndexReader r = DirectoryReader.Open(dir);
-            Assert.AreEqual(0, r.MaxDoc());
+            Assert.AreEqual(0, r.MaxDoc);
             r.Dispose();
             dir.Dispose();
         }
@@ -2547,13 +2547,13 @@ namespace Lucene.Net.Index
                 }
             }
             DirectoryReader reader = w.Reader;
-            Assert.AreEqual(docCount, reader.NumDocs());
-            IList<AtomicReaderContext> leaves = reader.Leaves();
+            Assert.AreEqual(docCount, reader.NumDocs);
+            IList<AtomicReaderContext> leaves = reader.Leaves;
             foreach (AtomicReaderContext atomicReaderContext in leaves)
             {
-                AtomicReader ar = (AtomicReader)atomicReaderContext.Reader();
+                AtomicReader ar = (AtomicReader)atomicReaderContext.Reader;
                 Bits liveDocs = ar.LiveDocs;
-                int maxDoc = ar.MaxDoc();
+                int maxDoc = ar.MaxDoc;
                 for (int i = 0; i < maxDoc; i++)
                 {
                     if (liveDocs == null || liveDocs.Get(i))
@@ -2808,7 +2808,7 @@ namespace Lucene.Net.Index
             w.Dispose();
 
             // Make sure document was not (incorrectly) deleted:
-            Assert.AreEqual(1, r.NumDocs());
+            Assert.AreEqual(1, r.NumDocs);
             r.Dispose();
             dir.Dispose();
         }
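
One detail worth noting across the TestIndexWriter hunks: on readers, both MaxDoc and NumDocs are now properties (IndexWriter.NumDocs() is left as a method call in these hunks), and they answer different questions. MaxDoc counts document slots including deletions, while NumDocs counts only live documents, which is why the test sees MaxDoc stay at 100 but NumDocs drop to 60 after deleting 40. A one-line sketch of how the two combine, where reader is any open DirectoryReader:

    // Deleted-but-not-yet-merged-away documents are the difference of the two properties.
    int deletedDocs = reader.MaxDoc - reader.NumDocs;   // was reader.MaxDoc() - reader.NumDocs()
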