Posted to commits@lucene.apache.org by cm...@apache.org on 2013/08/11 14:19:39 UTC

svn commit: r1512909 [13/38] - in /lucene/dev/branches/lucene4956: ./ dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/libraries/ dev-tools/idea/lucene/suggest/ dev-tools/idea/solr/contrib/dataimporthandler/ dev-tools/idea/solr/core/src/test/ dev-too...

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java Sun Aug 11 12:19:13 2013
@@ -47,21 +47,16 @@ public class TestStressIndexing2 extends
   static int maxBufferedDocs=3;
   static int seed=0;
 
-  public class MockIndexWriter extends IndexWriter {
-
-    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
+  public final class YieldTestPoint implements RandomIndexWriter.TestPoint {
 
     @Override
-    boolean testPoint(String name) {
+    public void apply(String name) {
       //      if (name.equals("startCommit")) {
       if (random().nextInt(4) == 2)
         Thread.yield();
-      return true;
     }
   }
-  
+//  
   public void testRandomIWReader() throws Throwable {
     Directory dir = newDirectory();
     
@@ -151,12 +146,12 @@ public class TestStressIndexing2 extends
   
   public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
-                                                                                                  0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()));
+            0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
     w.commit();
     LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
-    lmp.setUseCompoundFile(false);
+    lmp.setNoCFSRatio(0.0);
     lmp.setMergeFactor(mergeFactor);
     /***
         w.setMaxMergeDocs(Integer.MAX_VALUE);
@@ -202,12 +197,12 @@ public class TestStressIndexing2 extends
   public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates,
                                           boolean doReaderPooling) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)
              .setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(maxThreadStates))
-             .setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()));
+             .setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
     LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
-    lmp.setUseCompoundFile(false);
+    lmp.setNoCFSRatio(0.0);
     lmp.setMergeFactor(mergeFactor);
 
     threads = new IndexingThread[nThreads];
@@ -359,7 +354,7 @@ public class TestStressIndexing2 extends
       }
 
       termDocs1 = _TestUtil.docs(random(), termsEnum, liveDocs1, termDocs1, DocsEnum.FLAG_NONE);
-      if (termsEnum2.seekExact(term, false)) {
+      if (termsEnum2.seekExact(term)) {
         termDocs2 = _TestUtil.docs(random(), termsEnum2, liveDocs2, termDocs2, DocsEnum.FLAG_NONE);
       } else {
         termDocs2 = null;

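For context, the TestStressIndexing2 hunks above replace the test's private MockIndexWriter subclass (which overrode IndexWriter.testPoint) with the test framework's factory method plus a RandomIndexWriter.TestPoint callback, and swap the removed LogMergePolicy.setUseCompoundFile(false) for setNoCFSRatio(0.0). A minimal sketch of that pattern, assuming a LuceneTestCase context; names other than those in the patch are illustrative:

    // Hypothetical no-op test point, same shape as YieldTestPoint above.
    final class NoOpTestPoint implements RandomIndexWriter.TestPoint {
      @Override
      public void apply(String name) {
        // invoked by the mock writer at named internal checkpoints; no-op here
      }
    }

    Directory dir = newDirectory();
    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
            .setMergePolicy(newLogMergePolicy()),
        new NoOpTestPoint());
    LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
    lmp.setNoCFSRatio(0.0); // 0.0 disables compound files, like the removed setUseCompoundFile(false)
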
Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java Sun Aug 11 12:19:13 2013
@@ -93,7 +93,7 @@ public class TestTermVectorsReader exten
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer()).
             setMaxBufferedDocs(-1).
-            setMergePolicy(newLogMergePolicy(false, 10))
+            setMergePolicy(newLogMergePolicy(false, 10)).setUseCompoundFile(false)
     );
 
     Document doc = new Document();
@@ -331,4 +331,64 @@ public class TestTermVectorsReader exten
     }
     reader.close();
   }
+
+  public void testIllegalIndexableField() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+    ft.setStoreTermVectors(true);
+    ft.setStoreTermVectorPayloads(true);
+    Document doc = new Document();
+    doc.add(new Field("field", "value", ft));
+    try {
+      w.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // Expected
+      assertEquals("cannot index term vector payloads without term vector positions (field=\"field\")", iae.getMessage());
+    }
+
+    ft = new FieldType(TextField.TYPE_NOT_STORED);
+    ft.setStoreTermVectors(false);
+    ft.setStoreTermVectorOffsets(true);
+    doc = new Document();
+    doc.add(new Field("field", "value", ft));
+    try {
+      w.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // Expected
+      assertEquals("cannot index term vector offsets when term vectors are not indexed (field=\"field\")", iae.getMessage());
+    }
+
+    ft = new FieldType(TextField.TYPE_NOT_STORED);
+    ft.setStoreTermVectors(false);
+    ft.setStoreTermVectorPositions(true);
+    doc = new Document();
+    doc.add(new Field("field", "value", ft));
+    try {
+      w.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // Expected
+      assertEquals("cannot index term vector positions when term vectors are not indexed (field=\"field\")", iae.getMessage());
+    }
+
+    ft = new FieldType(TextField.TYPE_NOT_STORED);
+    ft.setStoreTermVectors(false);
+    ft.setStoreTermVectorPayloads(true);
+    doc = new Document();
+    doc.add(new Field("field", "value", ft));
+    try {
+      w.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // Expected
+      assertEquals("cannot index term vector payloads when term vectors are not indexed (field=\"field\")", iae.getMessage());
+    }
+
+    w.close();
+    
+    dir.close();
+  }
 }

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java Sun Aug 11 12:19:13 2013
@@ -18,8 +18,6 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
-import java.io.StringReader;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CachingTokenFilter;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -176,7 +174,7 @@ public class TestTermVectorsWriter exten
     Analyzer analyzer = new MockAnalyzer(random());
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
-    TokenStream stream = analyzer.tokenStream("field", new StringReader("abcd   "));
+    TokenStream stream = analyzer.tokenStream("field", "abcd   ");
     stream.reset(); // TODO: weird to reset before wrapping with CachingTokenFilter... correct?
     stream = new CachingTokenFilter(stream);
     FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);

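The single change above switches to the Analyzer.tokenStream(String, String) convenience overload instead of wrapping the text in a StringReader. A short sketch of the usual consume pattern around that call (text and attribute handling are illustrative):

    TokenStream stream = analyzer.tokenStream("field", "abcd");
    stream.reset();
    while (stream.incrementToken()) {
      // inspect token attributes here
    }
    stream.end();
    stream.close();
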
Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java Sun Aug 11 12:19:13 2013
@@ -111,7 +111,7 @@ public class TestTermsEnum extends Lucen
             System.out.println("TEST: iter seekCeil target=" + target.utf8ToString() + " exists=" + exists);
           }
           // seekCeil
-          final TermsEnum.SeekStatus status = termsEnum.seekCeil(target, random().nextBoolean());
+          final TermsEnum.SeekStatus status = termsEnum.seekCeil(target);
           if (VERBOSE) {
             System.out.println("  got " + status);
           }
@@ -134,7 +134,7 @@ public class TestTermsEnum extends Lucen
             System.out.println("TEST: iter seekExact target=" + target.utf8ToString() + " exists=" + exists);
           }
           // seekExact
-          final boolean result = termsEnum.seekExact(target, false);
+          final boolean result = termsEnum.seekExact(target);
           if (VERBOSE) {
             System.out.println("  got " + result);
           }
@@ -571,7 +571,7 @@ public class TestTermsEnum extends Lucen
 
   // sugar
   private boolean seekExact(TermsEnum te, String term) throws IOException {
-    return te.seekExact(new BytesRef(term), random().nextBoolean());
+    return te.seekExact(new BytesRef(term));
   }
 
   // sugar
@@ -666,13 +666,13 @@ public class TestTermsEnum extends Lucen
         if (VERBOSE) {
           System.out.println("  seekExact");
         }
-        assertEquals(loc >= 0, te.seekExact(t, random().nextBoolean()));
+        assertEquals(loc >= 0, te.seekExact(t));
       } else {
         if (VERBOSE) {
           System.out.println("  seekCeil");
         }
 
-        final TermsEnum.SeekStatus result = te.seekCeil(t, random().nextBoolean());
+        final TermsEnum.SeekStatus result = te.seekCeil(t);
         if (VERBOSE) {
           System.out.println("  got " + result);
         }

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum2.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum2.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum2.java Sun Aug 11 12:19:13 2013
@@ -117,10 +117,10 @@ public class TestTermsEnum2 extends Luce
           // term is accepted
           if (random().nextBoolean()) {
             // seek exact
-            assertTrue(te.seekExact(term, random().nextBoolean()));
+            assertTrue(te.seekExact(term));
           } else {
             // seek ceil
-            assertEquals(SeekStatus.FOUND, te.seekCeil(term, random().nextBoolean()));
+            assertEquals(SeekStatus.FOUND, te.seekCeil(term));
             assertEquals(term, te.term());
           }
         }
@@ -138,10 +138,10 @@ public class TestTermsEnum2 extends Luce
         if (c == 0) {
           assertEquals(term, te.next());
         } else if (c == 1) {
-          assertEquals(SeekStatus.FOUND, te.seekCeil(term, random().nextBoolean()));
+          assertEquals(SeekStatus.FOUND, te.seekCeil(term));
           assertEquals(term, te.term());
         } else {
-          assertTrue(te.seekExact(term, random().nextBoolean()));
+          assertTrue(te.seekExact(term));
         }
       }
     }

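Both TermsEnum tests above drop the second boolean (useCache) argument from seekExact and seekCeil. A minimal sketch of the single-argument API the hunks migrate to; variable names are illustrative and `terms` is assumed to be a Terms instance:

    TermsEnum te = terms.iterator(null);
    BytesRef target = new BytesRef("lucene");
    if (te.seekExact(target)) {
      // positioned exactly on the requested term
    }
    TermsEnum.SeekStatus status = te.seekCeil(target);
    if (status == TermsEnum.SeekStatus.FOUND) {
      // exact match; NOT_FOUND leaves te on the next term, END means the enum is exhausted
    }
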
Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java Sun Aug 11 12:19:13 2013
@@ -40,7 +40,7 @@ public class TestTryDelete extends Lucen
   private static IndexWriter getWriter (Directory directory)
     throws IOException
   {
-    LogMergePolicy policy = new LogByteSizeMergePolicy();
+    MergePolicy policy = new LogByteSizeMergePolicy();
     IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
                                                    new MockAnalyzer(random()));
     conf.setMergePolicy(policy);

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java Sun Aug 11 12:19:13 2013
@@ -110,12 +110,7 @@ public class TestUniqueTermCount extends
     }
 
     @Override
-    public ExactSimScorer exactSimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public SloppySimScorer sloppySimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
+    public SimScorer simScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
       throw new UnsupportedOperationException();
     }
   }

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/BaseTestRangeFilter.java Sun Aug 11 12:19:13 2013
@@ -22,7 +22,12 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FloatField;
+import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -115,9 +120,17 @@ public class BaseTestRangeFilter extends
     
     Document doc = new Document();
     Field idField = newStringField(random, "id", "", Field.Store.YES);
+    Field intIdField = new IntField("id_int", 0, Store.YES);
+    Field floatIdField = new FloatField("id_float", 0, Store.YES);
+    Field longIdField = new LongField("id_long", 0, Store.YES);
+    Field doubleIdField = new DoubleField("id_double", 0, Store.YES);
     Field randField = newStringField(random, "rand", "", Field.Store.YES);
     Field bodyField = newStringField(random, "body", "", Field.Store.NO);
     doc.add(idField);
+    doc.add(intIdField);
+    doc.add(floatIdField);
+    doc.add(longIdField);
+    doc.add(doubleIdField);
     doc.add(randField);
     doc.add(bodyField);
 
@@ -133,6 +146,10 @@ public class BaseTestRangeFilter extends
 
       for (int d = minId; d <= maxId; d++) {
         idField.setStringValue(pad(d));
+        intIdField.setIntValue(d);
+        floatIdField.setFloatValue(d);
+        longIdField.setLongValue(d);
+        doubleIdField.setDoubleValue(d);
         int r = index.allowNegativeRandomInts ? random.nextInt() : random
           .nextInt(Integer.MAX_VALUE);
         if (index.maxR < r) {

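The BaseTestRangeFilter hunks add per-type numeric id fields alongside the existing string id and reuse the same Field instances across documents. A condensed sketch of that pattern, assuming the same LuceneTestCase helpers; `writer`, `minId`, and `maxId` are illustrative:

    Document doc = new Document();
    Field idField = newStringField(random, "id", "", Field.Store.YES);
    Field intIdField = new IntField("id_int", 0, Field.Store.YES);
    Field longIdField = new LongField("id_long", 0L, Field.Store.YES);
    doc.add(idField);
    doc.add(intIdField);
    doc.add(longIdField);
    for (int d = minId; d <= maxId; d++) {
      idField.setStringValue(pad(d));
      intIdField.setIntValue(d);     // same Field objects, new values for each document
      longIdField.setLongValue(d);
      writer.addDocument(doc);
    }
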
Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java Sun Aug 11 12:19:13 2013
@@ -270,12 +270,7 @@ final class JustCompileSearch {
     }
 
     @Override
-    public ExactSimScorer exactSimScorer(SimWeight stats, AtomicReaderContext context) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public SloppySimScorer sloppySimScorer(SimWeight stats, AtomicReaderContext context) {
+    public SimScorer simScorer(SimWeight stats, AtomicReaderContext context) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java Sun Aug 11 12:19:13 2013
@@ -109,18 +109,8 @@ public class TestConjunctions extends Lu
     }
 
     @Override
-    public ExactSimScorer exactSimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      return new ExactSimScorer() {
-        @Override
-        public float score(int doc, int freq) {
-          return freq;
-        }
-      };
-    }
-
-    @Override
-    public SloppySimScorer sloppySimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      return new SloppySimScorer() {
+    public SimScorer simScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
+      return new SimScorer() {
         @Override
         public float score(int doc, float freq) {
           return freq;

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java Sun Aug 11 12:19:13 2013
@@ -156,34 +156,11 @@ public class TestDocValuesScoring extend
     }
 
     @Override
-    public ExactSimScorer exactSimScorer(SimWeight stats, AtomicReaderContext context) throws IOException {
-      final ExactSimScorer sub = sim.exactSimScorer(stats, context);
-      final FieldCache.Floats values = FieldCache.DEFAULT.getFloats(context.reader(), boostField, false);
-
-      return new ExactSimScorer() {
-        @Override
-        public float score(int doc, int freq) {
-          return values.get(doc) * sub.score(doc, freq);
-        }
-
-        @Override
-        public Explanation explain(int doc, Explanation freq) {
-          Explanation boostExplanation = new Explanation(values.get(doc), "indexDocValue(" + boostField + ")");
-          Explanation simExplanation = sub.explain(doc, freq);
-          Explanation expl = new Explanation(boostExplanation.getValue() * simExplanation.getValue(), "product of:");
-          expl.addDetail(boostExplanation);
-          expl.addDetail(simExplanation);
-          return expl;
-        }
-      };
-    }
-
-    @Override
-    public SloppySimScorer sloppySimScorer(SimWeight stats, AtomicReaderContext context) throws IOException {
-      final SloppySimScorer sub = sim.sloppySimScorer(stats, context);
+    public SimScorer simScorer(SimWeight stats, AtomicReaderContext context) throws IOException {
+      final SimScorer sub = sim.simScorer(stats, context);
       final FieldCache.Floats values = FieldCache.DEFAULT.getFloats(context.reader(), boostField, false);
       
-      return new SloppySimScorer() {
+      return new SimScorer() {
         @Override
         public float score(int doc, float freq) {
           return values.get(doc) * sub.score(doc, freq);

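The Similarity-related hunks above (TestUniqueTermCount, JustCompileSearch, TestConjunctions, TestDocValuesScoring) all follow the same migration: the separate exactSimScorer/sloppySimScorer hooks collapse into a single simScorer method returning a SimScorer whose score(int doc, float freq) covers both exact and sloppy frequencies. A minimal sketch of such an override, assuming Lucene 4.x's Similarity.SimScorer abstract methods:

    @Override
    public SimScorer simScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
      return new SimScorer() {
        @Override
        public float score(int doc, float freq) {
          return freq; // score by raw (possibly sloppy) term frequency
        }
        @Override
        public float computeSlopFactor(int distance) {
          return 1f;   // no slop penalty in this sketch
        }
        @Override
        public float computePayloadFactor(int doc, int start, int end, BytesRef payload) {
          return 1f;   // ignore payloads in this sketch
        }
      };
    }
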
Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCache.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCache.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCache.java Sun Aug 11 12:19:13 2013
@@ -23,33 +23,45 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FloatField;
 import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StoredField;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.FieldCache.Bytes;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.DocTermOrds;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.FieldCache.Doubles;
 import org.apache.lucene.search.FieldCache.Floats;
 import org.apache.lucene.search.FieldCache.Ints;
 import org.apache.lucene.search.FieldCache.Longs;
-import org.apache.lucene.search.FieldCache.Shorts;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util._TestUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -70,8 +82,6 @@ public class TestFieldCache extends Luce
     RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
     long theLong = Long.MAX_VALUE;
     double theDouble = Double.MAX_VALUE;
-    byte theByte = Byte.MAX_VALUE;
-    short theShort = Short.MAX_VALUE;
     int theInt = Integer.MAX_VALUE;
     float theFloat = Float.MAX_VALUE;
     unicodeStrings = new String[NUM_DOCS];
@@ -81,14 +91,12 @@ public class TestFieldCache extends Luce
     }
     for (int i = 0; i < NUM_DOCS; i++){
       Document doc = new Document();
-      doc.add(newStringField("theLong", String.valueOf(theLong--), Field.Store.NO));
-      doc.add(newStringField("theDouble", String.valueOf(theDouble--), Field.Store.NO));
-      doc.add(newStringField("theByte", String.valueOf(theByte--), Field.Store.NO));
-      doc.add(newStringField("theShort", String.valueOf(theShort--), Field.Store.NO));
-      doc.add(newStringField("theInt", String.valueOf(theInt--), Field.Store.NO));
-      doc.add(newStringField("theFloat", String.valueOf(theFloat--), Field.Store.NO));
+      doc.add(new LongField("theLong", theLong--, Field.Store.NO));
+      doc.add(new DoubleField("theDouble", theDouble--, Field.Store.NO));
+      doc.add(new IntField("theInt", theInt--, Field.Store.NO));
+      doc.add(new FloatField("theFloat", theFloat--, Field.Store.NO));
       if (i%2 == 0) {
-        doc.add(newStringField("sparse", String.valueOf(i), Field.Store.NO));
+        doc.add(new IntField("sparse", i, Field.Store.NO));
       }
 
       if (i%2 == 0) {
@@ -133,7 +141,16 @@ public class TestFieldCache extends Luce
       ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
       cache.setInfoStream(new PrintStream(bos, false, "UTF-8"));
       cache.getDoubles(reader, "theDouble", false);
-      cache.getFloats(reader, "theDouble", false);
+      cache.getFloats(reader, "theDouble", new FieldCache.FloatParser() {
+        @Override
+        public TermsEnum termsEnum(Terms terms) throws IOException {
+          return NumericUtils.filterPrefixCodedLongs(terms.iterator(null));
+        }
+        @Override
+        public float parseFloat(BytesRef term) {
+          return NumericUtils.sortableIntToFloat((int) NumericUtils.prefixCodedToLong(term));
+        }
+      }, false);
       assertTrue(bos.toString("UTF-8").indexOf("WARNING") != -1);
     } finally {
       FieldCache.DEFAULT.purgeAllCaches();
@@ -144,42 +161,28 @@ public class TestFieldCache extends Luce
     FieldCache cache = FieldCache.DEFAULT;
     FieldCache.Doubles doubles = cache.getDoubles(reader, "theDouble", random().nextBoolean());
     assertSame("Second request to cache return same array", doubles, cache.getDoubles(reader, "theDouble", random().nextBoolean()));
-    assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, random().nextBoolean()));
+    assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertTrue(doubles.get(i) + " does not equal: " + (Double.MAX_VALUE - i), doubles.get(i) == (Double.MAX_VALUE - i));
     }
     
     FieldCache.Longs longs = cache.getLongs(reader, "theLong", random().nextBoolean());
     assertSame("Second request to cache return same array", longs, cache.getLongs(reader, "theLong", random().nextBoolean()));
-    assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.DEFAULT_LONG_PARSER, random().nextBoolean()));
+    assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.NUMERIC_UTILS_LONG_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertTrue(longs.get(i) + " does not equal: " + (Long.MAX_VALUE - i) + " i=" + i, longs.get(i) == (Long.MAX_VALUE - i));
     }
-    
-    FieldCache.Bytes bytes = cache.getBytes(reader, "theByte", random().nextBoolean());
-    assertSame("Second request to cache return same array", bytes, cache.getBytes(reader, "theByte", random().nextBoolean()));
-    assertSame("Second request with explicit parser return same array", bytes, cache.getBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER, random().nextBoolean()));
-    for (int i = 0; i < NUM_DOCS; i++) {
-      assertTrue(bytes.get(i) + " does not equal: " + (Byte.MAX_VALUE - i), bytes.get(i) == (byte) (Byte.MAX_VALUE - i));
-    }
-    
-    FieldCache.Shorts shorts = cache.getShorts(reader, "theShort", random().nextBoolean());
-    assertSame("Second request to cache return same array", shorts, cache.getShorts(reader, "theShort", random().nextBoolean()));
-    assertSame("Second request with explicit parser return same array", shorts, cache.getShorts(reader, "theShort", FieldCache.DEFAULT_SHORT_PARSER, random().nextBoolean()));
-    for (int i = 0; i < NUM_DOCS; i++) {
-      assertTrue(shorts.get(i) + " does not equal: " + (Short.MAX_VALUE - i), shorts.get(i) == (short) (Short.MAX_VALUE - i));
-    }
-    
+
     FieldCache.Ints ints = cache.getInts(reader, "theInt", random().nextBoolean());
     assertSame("Second request to cache return same array", ints, cache.getInts(reader, "theInt", random().nextBoolean()));
-    assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.DEFAULT_INT_PARSER, random().nextBoolean()));
+    assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertTrue(ints.get(i) + " does not equal: " + (Integer.MAX_VALUE - i), ints.get(i) == (Integer.MAX_VALUE - i));
     }
     
     FieldCache.Floats floats = cache.getFloats(reader, "theFloat", random().nextBoolean());
     assertSame("Second request to cache return same array", floats, cache.getFloats(reader, "theFloat", random().nextBoolean()));
-    assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.DEFAULT_FLOAT_PARSER, random().nextBoolean()));
+    assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertTrue(floats.get(i) + " does not equal: " + (Float.MAX_VALUE - i), floats.get(i) == (Float.MAX_VALUE - i));
     }
@@ -587,12 +590,6 @@ public class TestFieldCache extends Luce
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
     
-    Bytes bytes = cache.getBytes(ar, "bogusbytes", true);
-    assertEquals(0, bytes.get(0));
-
-    Shorts shorts = cache.getShorts(ar, "bogusshorts", true);
-    assertEquals(0, shorts.get(0));
-    
     Ints ints = cache.getInts(ar, "bogusints", true);
     assertEquals(0, ints.get(0));
     
@@ -652,12 +649,6 @@ public class TestFieldCache extends Luce
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
     
-    Bytes bytes = cache.getBytes(ar, "bogusbytes", true);
-    assertEquals(0, bytes.get(0));
-
-    Shorts shorts = cache.getShorts(ar, "bogusshorts", true);
-    assertEquals(0, shorts.get(0));
-    
     Ints ints = cache.getInts(ar, "bogusints", true);
     assertEquals(0, ints.get(0));
     
@@ -692,4 +683,97 @@ public class TestFieldCache extends Luce
     ir.close();
     dir.close();
   }
+
+  // Make sure that the use of GrowableWriter doesn't prevent from using the full long range
+  public void testLongFieldCache() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    cfg.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+    Document doc = new Document();
+    LongField field = new LongField("f", 0L, Store.YES);
+    doc.add(field);
+    final long[] values = new long[_TestUtil.nextInt(random(), 1, 10)];
+    for (int i = 0; i < values.length; ++i) {
+      final long v;
+      switch (random().nextInt(10)) {
+        case 0:
+          v = Long.MIN_VALUE;
+          break;
+        case 1:
+          v = 0;
+          break;
+        case 2:
+          v = Long.MAX_VALUE;
+          break;
+        default:
+          v = _TestUtil.nextLong(random(), -10, 10);
+          break;
+      }
+      values[i] = v;
+      if (v == 0 && random().nextBoolean()) {
+        // missing
+        iw.addDocument(new Document());
+      } else {
+        field.setLongValue(v);
+        iw.addDocument(doc);
+      }
+    }
+    iw.forceMerge(1);
+    final DirectoryReader reader = iw.getReader();
+    final FieldCache.Longs longs = FieldCache.DEFAULT.getLongs(getOnlySegmentReader(reader), "f", false);
+    for (int i = 0; i < values.length; ++i) {
+      assertEquals(values[i], longs.get(i));
+    }
+    reader.close();
+    iw.close();
+    dir.close();
+  }
+
+  // Make sure that the use of GrowableWriter doesn't prevent from using the full int range
+  public void testIntFieldCache() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    cfg.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+    Document doc = new Document();
+    IntField field = new IntField("f", 0, Store.YES);
+    doc.add(field);
+    final int[] values = new int[_TestUtil.nextInt(random(), 1, 10)];
+    for (int i = 0; i < values.length; ++i) {
+      final int v;
+      switch (random().nextInt(10)) {
+        case 0:
+          v = Integer.MIN_VALUE;
+          break;
+        case 1:
+          v = 0;
+          break;
+        case 2:
+          v = Integer.MAX_VALUE;
+          break;
+        default:
+          v = _TestUtil.nextInt(random(), -10, 10);
+          break;
+      }
+      values[i] = v;
+      if (v == 0 && random().nextBoolean()) {
+        // missing
+        iw.addDocument(new Document());
+      } else {
+        field.setIntValue(v);
+        iw.addDocument(doc);
+      }
+    }
+    iw.forceMerge(1);
+    final DirectoryReader reader = iw.getReader();
+    final FieldCache.Ints ints = FieldCache.DEFAULT.getInts(getOnlySegmentReader(reader), "f", false);
+    for (int i = 0; i < values.length; ++i) {
+      assertEquals(values[i], ints.get(i));
+    }
+    reader.close();
+    iw.close();
+    dir.close();
+  }
+
 }

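The TestFieldCache changes index the test fields as true numeric (trie-encoded) fields, so cached reads go through the NumericUtils-based parsers rather than the removed plain-text DEFAULT_* parsers. A short sketch of the lookup side; `atomicReader` is an illustrative name for the AtomicReader that FieldCache expects:

    FieldCache.Ints ints = FieldCache.DEFAULT.getInts(atomicReader, "theInt",
        FieldCache.NUMERIC_UTILS_INT_PARSER, false);
    FieldCache.Longs longs = FieldCache.DEFAULT.getLongs(atomicReader, "theLong",
        FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
    int firstInt = ints.get(0);    // values decoded from the trie-encoded terms
    long firstLong = longs.get(0);
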
Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java Sun Aug 11 12:19:13 2013
@@ -19,15 +19,17 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 
-
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.IntField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.Term;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
 import org.junit.Test;
 
 /**
@@ -187,98 +189,6 @@ public class TestFieldCacheRangeFilter e
     assertEquals("max,nul,T,T", 1, result.length);
   }
   
-  // byte-ranges cannot be tested, because all ranges are too big for bytes, need an extra range for that
-
-  @Test
-  public void testFieldCacheRangeFilterShorts() throws IOException {
-
-    IndexReader reader = signedIndexReader;
-    IndexSearcher search = newSearcher(reader);
-
-    int numDocs = reader.numDocs();
-    int medId = ((maxId - minId) / 2);
-    Short minIdO = Short.valueOf((short) minId);
-    Short maxIdO = Short.valueOf((short) maxId);
-    Short medIdO = Short.valueOf((short) medId);
-        
-    assertEquals("num of docs", numDocs, 1+ maxId - minId);
-        
-    ScoreDoc[] result;
-    Query q = new TermQuery(new Term("body","body"));
-
-    // test id, bounded on both ends
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,T,T), numDocs).scoreDocs;
-    assertEquals("find all", numDocs, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,T,F), numDocs).scoreDocs;
-    assertEquals("all but last", numDocs-1, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,F,T), numDocs).scoreDocs;
-    assertEquals("all but first", numDocs-1, result.length);
-        
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,F,F), numDocs).scoreDocs;
-    assertEquals("all but ends", numDocs-2, result.length);
-    
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,maxIdO,T,T), numDocs).scoreDocs;
-    assertEquals("med and up", 1+ maxId-medId, result.length);
-        
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
-    assertEquals("up to med", 1+ medId-minId, result.length);
-    
-    // unbounded id
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,null,T,T), numDocs).scoreDocs;
-    assertEquals("find all", numDocs, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,null,T,F), numDocs).scoreDocs;
-    assertEquals("min and up", numDocs, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,maxIdO,F,T), numDocs).scoreDocs;
-    assertEquals("max and down", numDocs, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,null,F,F), numDocs).scoreDocs;
-    assertEquals("not min, but up", numDocs-1, result.length);
-        
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,maxIdO,F,F), numDocs).scoreDocs;
-    assertEquals("not max, but down", numDocs-1, result.length);
-        
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,maxIdO,T,F), numDocs).scoreDocs;
-    assertEquals("med and up, not max", maxId-medId, result.length);
-        
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,medIdO,F,T), numDocs).scoreDocs;
-    assertEquals("not min, up to med", medId-minId, result.length);
-
-    // very small sets
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,minIdO,F,F), numDocs).scoreDocs;
-    assertEquals("min,min,F,F", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,medIdO,F,F), numDocs).scoreDocs;
-    assertEquals("med,med,F,F", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
-    assertEquals("max,max,F,F", 0, result.length);
-                     
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,minIdO,T,T), numDocs).scoreDocs;
-    assertEquals("min,min,T,T", 1, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,minIdO,F,T), numDocs).scoreDocs;
-    assertEquals("nul,min,F,T", 1, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
-    assertEquals("max,max,T,T", 1, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,null,T,F), numDocs).scoreDocs;
-    assertEquals("max,nul,T,T", 1, result.length);
-
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,medIdO,T,T), numDocs).scoreDocs;
-    assertEquals("med,med,T,T", 1, result.length);
-    
-    // special cases
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",Short.valueOf(Short.MAX_VALUE),null,F,F), numDocs).scoreDocs;
-    assertEquals("overflow special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,Short.valueOf(Short.MIN_VALUE),F,F), numDocs).scoreDocs;
-    assertEquals("overflow special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
-    assertEquals("inverse range", 0, result.length);
-  }
-  
   @Test
   public void testFieldCacheRangeFilterInts() throws IOException {
 
@@ -298,75 +208,75 @@ public class TestFieldCacheRangeFilter e
 
     // test id, bounded on both ends
         
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,T,F), numDocs).scoreDocs;
     assertEquals("all but last", numDocs-1, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,F,T), numDocs).scoreDocs;
     assertEquals("all but first", numDocs-1, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,F,F), numDocs).scoreDocs;
     assertEquals("all but ends", numDocs-2, result.length);
     
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,maxIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,maxIdO,T,T), numDocs).scoreDocs;
     assertEquals("med and up", 1+ maxId-medId, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,medIdO,T,T), numDocs).scoreDocs;
     assertEquals("up to med", 1+ medId-minId, result.length);
     
     // unbounded id
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,null,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,null,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,null,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,null,T,F), numDocs).scoreDocs;
     assertEquals("min and up", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,maxIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,maxIdO,F,T), numDocs).scoreDocs;
     assertEquals("max and down", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,null,F,F), numDocs).scoreDocs;
     assertEquals("not min, but up", numDocs-1, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,maxIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,maxIdO,F,F), numDocs).scoreDocs;
     assertEquals("not max, but down", numDocs-1, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,maxIdO,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,maxIdO,T,F), numDocs).scoreDocs;
     assertEquals("med and up, not max", maxId-medId, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,medIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,medIdO,F,T), numDocs).scoreDocs;
     assertEquals("not min, up to med", medId-minId, result.length);
 
     // very small sets
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,minIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,minIdO,F,F), numDocs).scoreDocs;
     assertEquals("min,min,F,F", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,medIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,medIdO,F,F), numDocs).scoreDocs;
     assertEquals("med,med,F,F", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
     assertEquals("max,max,F,F", 0, result.length);
                      
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,minIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,minIdO,T,T), numDocs).scoreDocs;
     assertEquals("min,min,T,T", 1, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,minIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,minIdO,F,T), numDocs).scoreDocs;
     assertEquals("nul,min,F,T", 1, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
     assertEquals("max,max,T,T", 1, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,null,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,null,T,F), numDocs).scoreDocs;
     assertEquals("max,nul,T,T", 1, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,medIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,medIdO,T,T), numDocs).scoreDocs;
     assertEquals("med,med,T,T", 1, result.length);
     
     // special cases
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",Integer.valueOf(Integer.MAX_VALUE),null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",Integer.valueOf(Integer.MAX_VALUE),null,F,F), numDocs).scoreDocs;
     assertEquals("overflow special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,Integer.valueOf(Integer.MIN_VALUE),F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,Integer.valueOf(Integer.MIN_VALUE),F,F), numDocs).scoreDocs;
     assertEquals("overflow special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,minIdO,T,T), numDocs).scoreDocs;
     assertEquals("inverse range", 0, result.length);
   }
   
@@ -389,75 +299,75 @@ public class TestFieldCacheRangeFilter e
 
     // test id, bounded on both ends
         
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,T,F), numDocs).scoreDocs;
     assertEquals("all but last", numDocs-1, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,F,T), numDocs).scoreDocs;
     assertEquals("all but first", numDocs-1, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,F,F), numDocs).scoreDocs;
     assertEquals("all but ends", numDocs-2, result.length);
     
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,maxIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,maxIdO,T,T), numDocs).scoreDocs;
     assertEquals("med and up", 1+ maxId-medId, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,medIdO,T,T), numDocs).scoreDocs;
     assertEquals("up to med", 1+ medId-minId, result.length);
     
     // unbounded id
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,null,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,null,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,null,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,null,T,F), numDocs).scoreDocs;
     assertEquals("min and up", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,maxIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,maxIdO,F,T), numDocs).scoreDocs;
     assertEquals("max and down", numDocs, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,null,F,F), numDocs).scoreDocs;
     assertEquals("not min, but up", numDocs-1, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,maxIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,maxIdO,F,F), numDocs).scoreDocs;
     assertEquals("not max, but down", numDocs-1, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,maxIdO,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,maxIdO,T,F), numDocs).scoreDocs;
     assertEquals("med and up, not max", maxId-medId, result.length);
         
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,medIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,medIdO,F,T), numDocs).scoreDocs;
     assertEquals("not min, up to med", medId-minId, result.length);
 
     // very small sets
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,minIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,minIdO,F,F), numDocs).scoreDocs;
     assertEquals("min,min,F,F", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,medIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,medIdO,F,F), numDocs).scoreDocs;
     assertEquals("med,med,F,F", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
     assertEquals("max,max,F,F", 0, result.length);
                      
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,minIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,minIdO,T,T), numDocs).scoreDocs;
     assertEquals("min,min,T,T", 1, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,minIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,minIdO,F,T), numDocs).scoreDocs;
     assertEquals("nul,min,F,T", 1, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
     assertEquals("max,max,T,T", 1, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,null,T,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,null,T,F), numDocs).scoreDocs;
     assertEquals("max,nul,T,T", 1, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,medIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,medIdO,T,T), numDocs).scoreDocs;
     assertEquals("med,med,T,T", 1, result.length);
     
     // special cases
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",Long.valueOf(Long.MAX_VALUE),null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",Long.valueOf(Long.MAX_VALUE),null,F,F), numDocs).scoreDocs;
     assertEquals("overflow special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,Long.valueOf(Long.MIN_VALUE),F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,Long.valueOf(Long.MIN_VALUE),F,F), numDocs).scoreDocs;
     assertEquals("overflow special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,minIdO,T,T), numDocs).scoreDocs;
     assertEquals("inverse range", 0, result.length);
   }
   
@@ -476,19 +386,19 @@ public class TestFieldCacheRangeFilter e
     ScoreDoc[] result;
     Query q = new TermQuery(new Term("body","body"));
 
-    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",minIdO,medIdO,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs/2, result.length);
     int count = 0;
-    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,medIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",null,medIdO,F,T), numDocs).scoreDocs;
     count += result.length;
-    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",medIdO,null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",medIdO,null,F,F), numDocs).scoreDocs;
     count += result.length;
     assertEquals("sum of two concenatted ranges", numDocs, count);
-    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,null,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",null,null,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",Float.valueOf(Float.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",Float.valueOf(Float.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
     assertEquals("infinity special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,Float.valueOf(Float.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",null,Float.valueOf(Float.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
     assertEquals("infinity special case", 0, result.length);
   }
   
@@ -505,19 +415,19 @@ public class TestFieldCacheRangeFilter e
     ScoreDoc[] result;
     Query q = new TermQuery(new Term("body","body"));
 
-    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",minIdO,medIdO,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs/2, result.length);
     int count = 0;
-    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null,medIdO,F,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",null,medIdO,F,T), numDocs).scoreDocs;
     count += result.length;
-    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",medIdO,null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",medIdO,null,F,F), numDocs).scoreDocs;
     count += result.length;
     assertEquals("sum of two concenatted ranges", numDocs, count);
-    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null,null,T,T), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",null,null,T,T), numDocs).scoreDocs;
     assertEquals("find all", numDocs, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",Double.valueOf(Double.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",Double.valueOf(Double.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
     assertEquals("infinity special case", 0, result.length);
-    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null, Double.valueOf(Double.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",null, Double.valueOf(Double.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
     assertEquals("infinity special case", 0, result.length);
   }
   
@@ -529,13 +439,15 @@ public class TestFieldCacheRangeFilter e
 
     for (int d = -20; d <= 20; d++) {
       Document doc = new Document();
-      doc.add(newStringField("id", Integer.toString(d), Field.Store.NO));
+      doc.add(new IntField("id_int", d, Field.Store.NO));
       doc.add(newStringField("body", "body", Field.Store.NO));
       writer.addDocument(doc);
     }
     
     writer.forceMerge(1);
-    writer.deleteDocuments(new Term("id","0"));
+    BytesRef term0 = new BytesRef();
+    NumericUtils.intToPrefixCoded(0, 0, term0);
+    writer.deleteDocuments(new Term("id_int", term0));
     writer.close();
 
     IndexReader reader = DirectoryReader.open(dir);
@@ -545,19 +457,19 @@ public class TestFieldCacheRangeFilter e
     ScoreDoc[] result;
     Query q = new TermQuery(new Term("body","body"));
 
-    result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) 20),T,T), 100).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",-20,20,T,T), 100).scoreDocs;
     assertEquals("find all", 40, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) 0),Byte.valueOf((byte) 20),T,T), 100).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",0,20,T,T), 100).scoreDocs;
     assertEquals("find all", 20, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) 0),T,T), 100).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",-20,0,T,T), 100).scoreDocs;
     assertEquals("find all", 20, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) 10),Byte.valueOf((byte) 20),T,T), 100).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",10,20,T,T), 100).scoreDocs;
     assertEquals("find all", 11, result.length);
 
-    result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) -10),T,T), 100).scoreDocs;
+    result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",-20,-10,T,T), 100).scoreDocs;
     assertEquals("find all", 11, result.length);
     reader.close();
     dir.close();
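
A side note on the delete in the hunk above: because "id_int" is now indexed as an IntField, a plain Term("id_int", "0") no longer matches anything, so the test deletes by the prefix-coded form of the value. A minimal alternative sketch, assuming the same writer and the Lucene 4.x NumericRangeQuery API (this is not what the commit does), deletes the same document with a single-value range query:

      // hypothetical equivalent: delete the document whose IntField "id_int" equals 0
      writer.deleteDocuments(NumericRangeQuery.newIntRange("id_int", 0, 0, true, true));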

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java Sun Aug 11 12:19:13 2013
@@ -29,9 +29,11 @@ import org.apache.lucene.index.RandomInd
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NamedThreadFactory;
 import org.apache.lucene.util._TestUtil;
+import org.junit.Test;
 
 public class TestIndexSearcher extends LuceneTestCase {
   Directory dir;
@@ -116,4 +118,25 @@ public class TestIndexSearcher extends L
     
     _TestUtil.shutdownExecutorService(service);
   }
+  
+  @Test
+  public void testSearchAfterPassedMaxDoc() throws Exception {
+    // LUCENE-5128: ensure we get a meaningful message if searchAfter exceeds maxDoc
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+    w.addDocument(new Document());
+    IndexReader r = w.getReader();
+    w.close();
+    
+    IndexSearcher s = new IndexSearcher(r);
+    try {
+      s.searchAfter(new ScoreDoc(r.maxDoc(), 0.54f), new MatchAllDocsQuery(), 10);
+      fail("should have hit IllegalArgumentException when searchAfter exceeds maxDoc");
+    } catch (IllegalArgumentException e) {
+      // ok
+    } finally {
+      IOUtils.close(r, dir);
+    }
+  }
+  
 }
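
For contrast with the failure case exercised by testSearchAfterPassedMaxDoc, a minimal sketch of the intended searchAfter usage (assuming the same searcher s and a MatchAllDocsQuery; hypothetical, not part of the commit) looks like:

      // the 'after' ScoreDoc comes from a real hit on a previous page, so its doc id is < maxDoc
      TopDocs page1 = s.search(new MatchAllDocsQuery(), 10);
      ScoreDoc last = page1.scoreDocs[page1.scoreDocs.length - 1];
      TopDocs page2 = s.searchAfter(last, new MatchAllDocsQuery(), 10);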

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java Sun Aug 11 12:19:13 2013
@@ -37,7 +37,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.search.BooleanQuery.BooleanWeight;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
-import org.apache.lucene.search.similarities.Similarity.ExactSimScorer;
+import org.apache.lucene.search.similarities.Similarity.SimScorer;
 import org.apache.lucene.search.similarities.Similarity.SimWeight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
@@ -274,7 +274,7 @@ public class TestMinShouldMatch2 extends
     final int maxDoc;
 
     final Set<Long> ords = new HashSet<Long>();
-    final ExactSimScorer[] sims;
+    final SimScorer[] sims;
     final int minNrShouldMatch;
     
     double score = Float.NaN;
@@ -285,7 +285,7 @@ public class TestMinShouldMatch2 extends
       this.maxDoc = reader.maxDoc();
       BooleanQuery bq = (BooleanQuery) weight.getQuery();
       this.minNrShouldMatch = bq.getMinimumNumberShouldMatch();
-      this.sims = new ExactSimScorer[(int)dv.getValueCount()];
+      this.sims = new SimScorer[(int)dv.getValueCount()];
       for (BooleanClause clause : bq.getClauses()) {
         assert !clause.isProhibited();
         assert !clause.isRequired();
@@ -294,13 +294,13 @@ public class TestMinShouldMatch2 extends
         if (ord >= 0) {
           boolean success = ords.add(ord);
           assert success; // no dups
-          TermContext context = TermContext.build(reader.getContext(), term, true);
+          TermContext context = TermContext.build(reader.getContext(), term);
           SimWeight w = weight.similarity.computeWeight(1f, 
                         searcher.collectionStatistics("field"),
                         searcher.termStatistics(term, context));
           w.getValueForNormalization(); // ignored
           w.normalize(1F, 1F);
-          sims[(int)ord] = weight.similarity.exactSimScorer(w, reader.getContext());
+          sims[(int)ord] = weight.similarity.simScorer(w, reader.getContext());
         }
       }
     }

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java Sun Aug 11 12:19:13 2013
@@ -41,7 +41,7 @@ public class TestMultiThreadTermVectors 
     super.setUp();
     directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
-    //writer.setUseCompoundFile(false);
+    //writer.setNoCFSRatio(0.0);
     //writer.infoStream = System.out;
     FieldType customType = new FieldType(TextField.TYPE_STORED);
     customType.setTokenized(false);

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java Sun Aug 11 12:19:13 2013
@@ -617,7 +617,7 @@ public class TestPhraseQuery extends Luc
               break;
             }
           }
-          TokenStream ts = analyzer.tokenStream("ignore", new StringReader(term));
+          TokenStream ts = analyzer.tokenStream("ignore", term);
           CharTermAttribute termAttr = ts.addAttribute(CharTermAttribute.class);
           ts.reset();
           while(ts.incrementToken()) {

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java Sun Aug 11 12:19:13 2013
@@ -111,8 +111,6 @@ public class TestSearchAfter extends Luc
     assertQuery(query, filter, Sort.INDEXORDER);
     for(int rev=0;rev<2;rev++) {
       boolean reversed = rev == 1;
-      assertQuery(query, filter, new Sort(new SortField[] {new SortField("byte", SortField.Type.BYTE, reversed)}));
-      assertQuery(query, filter, new Sort(new SortField[] {new SortField("short", SortField.Type.SHORT, reversed)}));
       assertQuery(query, filter, new Sort(new SortField[] {new SortField("int", SortField.Type.INT, reversed)}));
       assertQuery(query, filter, new Sort(new SortField[] {new SortField("long", SortField.Type.LONG, reversed)}));
       assertQuery(query, filter, new Sort(new SortField[] {new SortField("float", SortField.Type.FLOAT, reversed)}));

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java Sun Aug 11 12:19:13 2013
@@ -219,7 +219,7 @@ public class TestShardSearching extends 
                 //sort = new Sort(SortField.FIELD_DOC);
                 sort = null;
               } else if (what == 2) {
-                sort = new Sort(new SortField[] {new SortField("docid", SortField.Type.INT, random().nextBoolean())});
+                sort = new Sort(new SortField[] {new SortField("docid_int", SortField.Type.INT, random().nextBoolean())});
               } else {
                 sort = new Sort(new SortField[] {new SortField("title", SortField.Type.STRING, random().nextBoolean())});
               }

Modified: lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java?rev=1512909&r1=1512908&r2=1512909&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java (original)
+++ lucene/dev/branches/lucene4956/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java Sun Aug 11 12:19:13 2013
@@ -108,6 +108,16 @@ public class TestSimilarityProvider exte
   private class Sim1 extends TFIDFSimilarity {
     
     @Override
+    public long encodeNormValue(float f) {
+      return (long) f;
+    }
+    
+    @Override
+    public float decodeNormValue(long norm) {
+      return norm;
+    }
+    
+    @Override
     public float coord(int overlap, int maxOverlap) {
       return 1f;
     }
@@ -146,6 +156,16 @@ public class TestSimilarityProvider exte
   private class Sim2 extends TFIDFSimilarity {
     
     @Override
+    public long encodeNormValue(float f) {
+      return (long) f;
+    }
+    
+    @Override
+    public float decodeNormValue(long norm) {
+      return norm;
+    }
+    
+    @Override
     public float coord(int overlap, int maxOverlap) {
       return 1f;
     }