Posted to commits@lucene.apache.org by ho...@apache.org on 2016/03/12 01:27:08 UTC

[27/50] [abbrv] lucene-solr git commit: LUCENE-7090, LUCENE-7075: deprecate single-valued LegacyNumerics fieldcaching, provide Points-based replacement.
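
For orientation before reading the diff: the Points-based replacement exercised by the new tests indexes each numeric value twice, once as an IntPoint (or LongPoint/FloatPoint/DoublePoint) so it is searchable, and once as a StoredField so it can be read back, and then wraps the reader with UninvertingReader using one of the new *_POINT types so the field can be sorted through the field cache as if it had doc values. The sketch below is distilled from the tests in this commit and is illustrative only; the class name PointSortSketch, the helper name sortByIntPoint, the field name "value", and the assumption that an already-open IndexWriter is passed in are not part of the commit.

    import java.io.IOException;
    import java.util.Collections;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.IntPoint;
    import org.apache.lucene.document.StoredField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.uninverting.UninvertingReader;
    import org.apache.lucene.uninverting.UninvertingReader.Type;

    final class PointSortSketch {
      // Index a few ints as IntPoint + StoredField, then sort them through the
      // uninverting field cache using the new Type.INTEGER_POINT mapping.
      static void sortByIntPoint(IndexWriter writer) throws IOException {
        for (int v : new int[] {300000, -1, 4}) {
          Document doc = new Document();
          doc.add(new IntPoint("value", v));     // indexed as a point: searchable, not stored
          doc.add(new StoredField("value", v));  // stored copy so the value can be read back
          writer.addDocument(doc);
        }
        IndexReader reader = UninvertingReader.wrap(DirectoryReader.open(writer),
            Collections.singletonMap("value", Type.INTEGER_POINT));
        IndexSearcher searcher = new IndexSearcher(reader);
        Sort sort = new Sort(new SortField("value", SortField.Type.INT));
        TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
        for (ScoreDoc sd : td.scoreDocs) {
          System.out.println(searcher.doc(sd.doc).get("value"));  // prints -1, 4, 300000
        }
        reader.close();
      }
    }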

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
index 717d364..f46bdde 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
@@ -23,11 +23,16 @@ import java.util.Map;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.LegacyDoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LegacyFloatField;
 import org.apache.lucene.document.LegacyIntField;
 import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -449,6 +454,140 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new IntPoint("value", 300000));
+    doc.add(new StoredField("value", 300000));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("300000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type int with a missing value */
+  public void testIntMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */
+  public void testIntMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.INT);
+    sortField.setMissingValue(Integer.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as Integer.MAX_VALUE
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type int in reverse */
+  public void testIntReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new IntPoint("value", 300000));
+    doc.add(new StoredField("value", 300000));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("300000", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type legacy int */
+  public void testLegacyInt() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyIntField("value", 300000, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -458,7 +597,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -475,8 +614,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type int with a missing value */
-  public void testIntMissing() throws IOException {
+  /** Tests sorting on type legacy int with a missing value */
+  public void testLegacyIntMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -488,7 +627,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -505,8 +644,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */
-  public void testIntMissingLast() throws IOException {
+  /** Tests sorting on type legacy int, specifying the missing value should be treated as Integer.MAX_VALUE */
+  public void testLegacyIntMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -518,7 +657,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -537,8 +676,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type int in reverse */
-  public void testIntReverse() throws IOException {
+  /** Tests sorting on type legacy int in reverse */
+  public void testLegacyIntReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -551,7 +690,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -573,6 +712,140 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new LongPoint("value", 3000000000L));
+    doc.add(new StoredField("value", 3000000000L));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("3000000000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type long with a missing value */
+  public void testLongMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */
+  public void testLongMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.LONG);
+    sortField.setMissingValue(Long.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as Long.MAX_VALUE
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type long in reverse */
+  public void testLongReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new LongPoint("value", 3000000000L));
+    doc.add(new StoredField("value", 3000000000L));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("3000000000", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type legacy long */
+  public void testLegacyLong() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyLongField("value", 3000000000L, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -582,7 +855,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -599,8 +872,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type long with a missing value */
-  public void testLongMissing() throws IOException {
+  /** Tests sorting on type legacy long with a missing value */
+  public void testLegacyLongMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -612,7 +885,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -629,8 +902,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */
-  public void testLongMissingLast() throws IOException {
+  /** Tests sorting on type legacy long, specifying the missing value should be treated as Long.MAX_VALUE */
+  public void testLegacyLongMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -642,7 +915,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -661,8 +934,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type long in reverse */
-  public void testLongReverse() throws IOException {
+  /** Tests sorting on type legacy long in reverse */
+  public void testLegacyLongReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -675,7 +948,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -697,6 +970,140 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new FloatPoint("value", 30.1f));
+    doc.add(new StoredField("value", 30.1f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("30.1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type float with a missing value */
+  public void testFloatMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */
+  public void testFloatMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.FLOAT);
+    sortField.setMissingValue(Float.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as Float.MAX_VALUE
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type float in reverse */
+  public void testFloatReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new FloatPoint("value", 30.1f));
+    doc.add(new StoredField("value", 30.1f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type legacy float */
+  public void testLegacyFloat() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyFloatField("value", 30.1f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -706,7 +1113,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -723,8 +1130,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type float with a missing value */
-  public void testFloatMissing() throws IOException {
+  /** Tests sorting on type legacy float with a missing value */
+  public void testLegacyFloatMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -736,7 +1143,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -753,8 +1160,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */
-  public void testFloatMissingLast() throws IOException {
+  /** Tests sorting on type legacy float, specifying the missing value should be treated as Float.MAX_VALUE */
+  public void testLegacyFloatMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -766,7 +1173,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -785,8 +1192,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type float in reverse */
-  public void testFloatReverse() throws IOException {
+  /** Tests sorting on type legacy float in reverse */
+  public void testLegacyFloatReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -799,7 +1206,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -821,6 +1228,195 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new DoublePoint("value", 30.1));
+    doc.add(new StoredField("value", 30.1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // numeric order
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertEquals("30.1", searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double with +/- zero */
+  public void testDoubleSignedZero() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new DoublePoint("value", +0d));
+    doc.add(new StoredField("value", +0d));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -0d));
+    doc.add(new StoredField("value", -0d));
+    writer.addDocument(doc);
+    doc = new Document();
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // numeric order
+    double v0 = searcher.doc(td.scoreDocs[0].doc).getField("value").numericValue().doubleValue();
+    double v1 = searcher.doc(td.scoreDocs[1].doc).getField("value").numericValue().doubleValue();
+    assertEquals(0, v0, 0d);
+    assertEquals(0, v1, 0d);
+    // check sign bits
+    assertEquals(1, Double.doubleToLongBits(v0) >>> 63);
+    assertEquals(0, Double.doubleToLongBits(v1) >>> 63);
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double with a missing value */
+  public void testDoubleMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */
+  public void testDoubleMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.DOUBLE);
+    sortField.setMissingValue(Double.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // null treated as Double.MAX_VALUE
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double in reverse */
+  public void testDoubleReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new DoublePoint("value", 30.1));
+    doc.add(new StoredField("value", 30.1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // reverse numeric order
+    assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type legacy double */
+  public void testLegacyDouble() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyDoubleField("value", 30.1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -833,7 +1429,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -851,8 +1447,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double with +/- zero */
-  public void testDoubleSignedZero() throws IOException {
+  /** Tests sorting on type legacy double with +/- zero */
+  public void testLegacyDoubleSignedZero() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -863,7 +1459,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     writer.addDocument(doc);
     doc = new Document();
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -884,8 +1480,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double with a missing value */
-  public void testDoubleMissing() throws IOException {
+  /** Tests sorting on type legacy double with a missing value */
+  public void testLegacyDoubleMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -900,7 +1496,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -918,8 +1514,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */
-  public void testDoubleMissingLast() throws IOException {
+  /** Tests sorting on type legacy double, specifying the missing value should be treated as Double.MAX_VALUE */
+  public void testLegacyDoubleMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -934,7 +1530,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -954,8 +1550,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double in reverse */
-  public void testDoubleReverse() throws IOException {
+  /** Tests sorting on type legacy double in reverse */
+  public void testLegacyDoubleReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -971,7 +1567,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -1062,7 +1658,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     }
 
     IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w),
-                    Collections.singletonMap("id", Type.INTEGER));
+                    Collections.singletonMap("id", Type.LEGACY_INTEGER));
     w.close();
     Query q = new TermQuery(new Term("body", "text"));
     IndexSearcher s = newSearcher(r);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
index 0b6292d..f3bd455 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
@@ -30,7 +30,8 @@ import java.util.Set;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.IndexReader;
@@ -118,7 +119,8 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
         docValues.add(null);
       }
 
-      doc.add(new LegacyIntField("id", numDocs, Field.Store.YES));
+      doc.add(new IntPoint("id", numDocs));
+      doc.add(new StoredField("id", numDocs));
       writer.addDocument(doc);
       numDocs++;
 
@@ -130,7 +132,7 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
 
     Map<String,UninvertingReader.Type> mapping = new HashMap<>();
     mapping.put("stringdv", Type.SORTED);
-    mapping.put("id", Type.INTEGER);
+    mapping.put("id", Type.INTEGER_POINT);
     final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping);
     writer.close();
     if (VERBOSE) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
index 1b14522..23b7d0c 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
@@ -458,8 +458,8 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
     DirectoryReader ir = DirectoryReader.open(dir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
-      Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed");
-      Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv");
+      Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed", null);
+      Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv", null);
       assertEquals(expected, actual);
     }
     ir.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
index 9b05ee1..e716419 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
@@ -42,6 +42,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
+// TODO: what happened to this test... it's not actually uninverting?
 public class TestFieldCacheWithThreads extends LuceneTestCase {
 
   public void test() throws Exception {
@@ -83,7 +84,7 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
           public void run() {
             try {
               //NumericDocValues ndv = ar.getNumericDocValues("number");
-              NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
+              NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false);
               //BinaryDocValues bdv = ar.getBinaryDocValues("bytes");
               BinaryDocValues bdv = FieldCache.DEFAULT.getTerms(ar, "bytes", false);
               SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
@@ -93,16 +94,16 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
                 int docID = threadRandom.nextInt(numDocs);
                 switch(threadRandom.nextInt(4)) {
                 case 0:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.INT_POINT_PARSER, false).get(docID));
                   break;
                 case 1:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false).get(docID));
                   break;
                 case 2:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.FLOAT_POINT_PARSER, false).get(docID));
                   break;
                 case 3:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.DOUBLE_POINT_PARSER, false).get(docID));
                   break;
                 }
                 BytesRef term = bdv.get(docID);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
new file mode 100644
index 0000000..c4ef1c4
--- /dev/null
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
@@ -0,0 +1,498 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.uninverting;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LegacyDoubleField;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.LegacyFloatField;
+import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/** random assortment of tests against legacy numerics */
+public class TestLegacyFieldCache extends LuceneTestCase {
+  private static LeafReader reader;
+  private static int NUM_DOCS;
+  private static Directory directory;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    NUM_DOCS = atLeast(500);
+    directory = newDirectory();
+    RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    long theLong = Long.MAX_VALUE;
+    double theDouble = Double.MAX_VALUE;
+    int theInt = Integer.MAX_VALUE;
+    float theFloat = Float.MAX_VALUE;
+    if (VERBOSE) {
+      System.out.println("TEST: setUp");
+    }
+    for (int i = 0; i < NUM_DOCS; i++){
+      Document doc = new Document();
+      doc.add(new LegacyLongField("theLong", theLong--, Field.Store.NO));
+      doc.add(new LegacyDoubleField("theDouble", theDouble--, Field.Store.NO));
+      doc.add(new LegacyIntField("theInt", theInt--, Field.Store.NO));
+      doc.add(new LegacyFloatField("theFloat", theFloat--, Field.Store.NO));
+      if (i%2 == 0) {
+        doc.add(new LegacyIntField("sparse", i, Field.Store.NO));
+      }
+
+      if (i%2 == 0) {
+        doc.add(new LegacyIntField("numInt", i, Field.Store.NO));
+      }
+      writer.addDocument(doc);
+    }
+    IndexReader r = writer.getReader();
+    reader = SlowCompositeReaderWrapper.wrap(r);
+    TestUtil.checkReader(reader);
+    writer.close();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    reader.close();
+    reader = null;
+    directory.close();
+    directory = null;
+  }
+  
+  public void testInfoStream() throws Exception {
+    try {
+      FieldCache cache = FieldCache.DEFAULT;
+      ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+      cache.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8));
+      cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+      cache.getNumerics(reader, "theDouble", new FieldCache.Parser() {
+        @Override
+        public TermsEnum termsEnum(Terms terms) throws IOException {
+          return LegacyNumericUtils.filterPrefixCodedLongs(terms.iterator());
+        }
+        @Override
+        public long parseValue(BytesRef term) {
+          int val = (int) LegacyNumericUtils.prefixCodedToLong(term);
+          if (val<0) val ^= 0x7fffffff;
+          return val;
+        }
+      }, false);
+      assertTrue(bos.toString(IOUtils.UTF_8).indexOf("WARNING") != -1);
+    } finally {
+      FieldCache.DEFAULT.setInfoStream(null);
+      FieldCache.DEFAULT.purgeAllCaches();
+    }
+  }
+
+  public void test() throws IOException {
+    FieldCache cache = FieldCache.DEFAULT;
+    NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", doubles, cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Double.doubleToLongBits(Double.MAX_VALUE - i), doubles.get(i));
+    }
+    
+    NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", longs, cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Long.MAX_VALUE - i, longs.get(i));
+    }
+
+    NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", ints, cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Integer.MAX_VALUE - i, ints.get(i));
+    }
+    
+    NumericDocValues floats = cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", floats, cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Float.floatToIntBits(Float.MAX_VALUE - i), floats.get(i));
+    }
+
+    Bits docsWithField = cache.getDocsWithField(reader, "theLong", null);
+    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong", null));
+    assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
+    assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      assertTrue(docsWithField.get(i));
+    }
+    
+    docsWithField = cache.getDocsWithField(reader, "sparse", null);
+    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse", null));
+    assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
+    assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      assertEquals(i%2 == 0, docsWithField.get(i));
+    }
+
+    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
+  }
+
+  public void testEmptyIndex() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500));
+    writer.close();
+    IndexReader r = DirectoryReader.open(dir);
+    LeafReader reader = SlowCompositeReaderWrapper.wrap(r);
+    TestUtil.checkReader(reader);
+    FieldCache.DEFAULT.getTerms(reader, "foobar", true);
+    FieldCache.DEFAULT.getTermsIndex(reader, "foobar");
+    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
+    r.close();
+    dir.close();
+  }
+
+  public void testDocsWithField() throws Exception {
+    FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+    assertEquals(0, cache.getCacheEntries().length);
+    cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, true);
+
+    // The double[] takes one slot, and docsWithField should also
+    // have been populated:
+    assertEquals(2, cache.getCacheEntries().length);
+    Bits bits = cache.getDocsWithField(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER);
+
+    // No new entries should appear:
+    assertEquals(2, cache.getCacheEntries().length);
+    assertTrue(bits instanceof Bits.MatchAllBits);
+
+    NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true);
+    assertEquals(4, cache.getCacheEntries().length);
+    Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.LEGACY_INT_PARSER);
+    assertEquals(4, cache.getCacheEntries().length);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      if (i%2 == 0) {
+        assertTrue(docsWithField.get(i));
+        assertEquals(i, ints.get(i));
+      } else {
+        assertFalse(docsWithField.get(i));
+      }
+    }
+
+    NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean());
+    docsWithField = cache.getDocsWithField(reader, "numInt", FieldCache.LEGACY_INT_PARSER);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      if (i%2 == 0) {
+        assertTrue(docsWithField.get(i));
+        assertEquals(i, numInts.get(i));
+      } else {
+        assertFalse(docsWithField.get(i));
+      }
+    }
+  }
+  
+  public void testGetDocsWithFieldThreadSafety() throws Exception {
+    final FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+
+    int NUM_THREADS = 3;
+    Thread[] threads = new Thread[NUM_THREADS];
+    final AtomicBoolean failed = new AtomicBoolean();
+    final AtomicInteger iters = new AtomicInteger();
+    final int NUM_ITER = 200 * RANDOM_MULTIPLIER;
+    final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS,
+                                                    new Runnable() {
+                                                      @Override
+                                                      public void run() {
+                                                        cache.purgeAllCaches();
+                                                        iters.incrementAndGet();
+                                                      }
+                                                    });
+    for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
+      threads[threadIDX] = new Thread() {
+          @Override
+          public void run() {
+
+            try {
+              while(!failed.get()) {
+                final int op = random().nextInt(3);
+                if (op == 0) {
+                  // Purge all caches & resume, once all
+                  // threads get here:
+                  restart.await();
+                  if (iters.get() >= NUM_ITER) {
+                    break;
+                  }
+                } else if (op == 1) {
+                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
+                  for (int i = 0; i < docsWithField.length(); i++) {
+                    assertEquals(i%2 == 0, docsWithField.get(i));
+                  }
+                } else {
+                  NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true);
+                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
+                  for (int i = 0; i < docsWithField.length(); i++) {
+                    if (i%2 == 0) {
+                      assertTrue(docsWithField.get(i));
+                      assertEquals(i, ints.get(i));
+                    } else {
+                      assertFalse(docsWithField.get(i));
+                    }
+                  }
+                }
+              }
+            } catch (Throwable t) {
+              failed.set(true);
+              restart.reset();
+              throw new RuntimeException(t);
+            }
+          }
+        };
+      threads[threadIDX].start();
+    }
+
+    for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
+      threads[threadIDX].join();
+    }
+    assertFalse(failed.get());
+  }
+  
+  public void testDocValuesIntegration() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig(null);
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
+    Document doc = new Document();
+    doc.add(new BinaryDocValuesField("binary", new BytesRef("binary value")));
+    doc.add(new SortedDocValuesField("sorted", new BytesRef("sorted value")));
+    doc.add(new NumericDocValuesField("numeric", 42));
+    doc.add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value1")));
+    doc.add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value2")));
+    iw.addDocument(doc);
+    DirectoryReader ir = iw.getReader();
+    iw.close();
+    LeafReader ar = getOnlySegmentReader(ir);
+    
+    // Binary type: can be retrieved via getTerms()
+    expectThrows(IllegalStateException.class, () -> {
+      FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.LEGACY_INT_PARSER, false);
+    });
+    
+    // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
+    expectThrows(IllegalStateException.class, () -> {
+      FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.LEGACY_INT_PARSER, false);
+    });
+    
+    // Numeric type: can be retrieved via getNumerics()
+    NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.LEGACY_INT_PARSER, false);
+    assertEquals(42, numeric.get(0));
+       
+    // SortedSet type: can be retrieved via getDocTermOrds() 
+    expectThrows(IllegalStateException.class, () -> {
+      FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.LEGACY_INT_PARSER, false);
+    });
+    
+    ir.close();
+    dir.close();
+  }
+  
+  public void testNonexistantFields() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    iw.addDocument(doc);
+    DirectoryReader ir = iw.getReader();
+    iw.close();
+    
+    LeafReader ar = getOnlySegmentReader(ir);
+    
+    final FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+    assertEquals(0, cache.getCacheEntries().length);
+    
+    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true);
+    assertEquals(0, ints.get(0));
+    
+    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true);
+    assertEquals(0, longs.get(0));
+    
+    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true);
+    assertEquals(0, floats.get(0));
+    
+    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true);
+    assertEquals(0, doubles.get(0));
+    
+    // check that we cached nothing
+    assertEquals(0, cache.getCacheEntries().length);
+    ir.close();
+    dir.close();
+  }
+  
+  public void testNonIndexedFields() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new StoredField("bogusbytes", "bogus"));
+    doc.add(new StoredField("bogusshorts", "bogus"));
+    doc.add(new StoredField("bogusints", "bogus"));
+    doc.add(new StoredField("boguslongs", "bogus"));
+    doc.add(new StoredField("bogusfloats", "bogus"));
+    doc.add(new StoredField("bogusdoubles", "bogus"));
+    doc.add(new StoredField("bogusbits", "bogus"));
+    iw.addDocument(doc);
+    DirectoryReader ir = iw.getReader();
+    iw.close();
+    
+    LeafReader ar = getOnlySegmentReader(ir);
+    
+    final FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+    assertEquals(0, cache.getCacheEntries().length);
+    
+    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true);
+    assertEquals(0, ints.get(0));
+    
+    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true);
+    assertEquals(0, longs.get(0));
+    
+    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true);
+    assertEquals(0, floats.get(0));
+    
+    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true);
+    assertEquals(0, doubles.get(0));
+    
+    // check that we cached nothing
+    assertEquals(0, cache.getCacheEntries().length);
+    ir.close();
+    dir.close();
+  }
+
+  // Make sure that the use of GrowableWriter doesn't prevent using the full long range
+  public void testLongFieldCache() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
+    cfg.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+    Document doc = new Document();
+    LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
+    doc.add(field);
+    final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
+    for (int i = 0; i < values.length; ++i) {
+      final long v;
+      switch (random().nextInt(10)) {
+        case 0:
+          v = Long.MIN_VALUE;
+          break;
+        case 1:
+          v = 0;
+          break;
+        case 2:
+          v = Long.MAX_VALUE;
+          break;
+        default:
+          v = TestUtil.nextLong(random(), -10, 10);
+          break;
+      }
+      values[i] = v;
+      if (v == 0 && random().nextBoolean()) {
+        // missing
+        iw.addDocument(new Document());
+      } else {
+        field.setLongValue(v);
+        iw.addDocument(doc);
+      }
+    }
+    iw.forceMerge(1);
+    final DirectoryReader reader = iw.getReader();
+    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false);
+    for (int i = 0; i < values.length; ++i) {
+      assertEquals(values[i], longs.get(i));
+    }
+    reader.close();
+    iw.close();
+    dir.close();
+  }
+
+  // Make sure that the use of GrowableWriter doesn't prevent using the full int range
+  public void testIntFieldCache() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
+    cfg.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+    Document doc = new Document();
+    LegacyIntField field = new LegacyIntField("f", 0, Store.YES);
+    doc.add(field);
+    final int[] values = new int[TestUtil.nextInt(random(), 1, 10)];
+    for (int i = 0; i < values.length; ++i) {
+      final int v;
+      switch (random().nextInt(10)) {
+        case 0:
+          v = Integer.MIN_VALUE;
+          break;
+        case 1:
+          v = 0;
+          break;
+        case 2:
+          v = Integer.MAX_VALUE;
+          break;
+        default:
+          v = TestUtil.nextInt(random(), -10, 10);
+          break;
+      }
+      values[i] = v;
+      if (v == 0 && random().nextBoolean()) {
+        // missing
+        iw.addDocument(new Document());
+      } else {
+        field.setIntValue(v);
+        iw.addDocument(doc);
+      }
+    }
+    iw.forceMerge(1);
+    final DirectoryReader reader = iw.getReader();
+    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false);
+    for (int i = 0; i < values.length; ++i) {
+      assertEquals(values[i], ints.get(i));
+    }
+    reader.close();
+    iw.close();
+    dir.close();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
index bc85db4..a0cddf8 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
@@ -96,9 +96,9 @@ public class TestNumericTerms32 extends LuceneTestCase {
     }
   
     Map<String,Type> map = new HashMap<>();
-    map.put("field2", Type.INTEGER);
-    map.put("field4", Type.INTEGER);
-    map.put("field8", Type.INTEGER);
+    map.put("field2", Type.LEGACY_INTEGER);
+    map.put("field4", Type.LEGACY_INTEGER);
+    map.put("field8", Type.LEGACY_INTEGER);
     reader = UninvertingReader.wrap(writer.getReader(), map);
     searcher=newSearcher(reader);
     writer.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
index d9fcc92..0724d86 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
@@ -100,10 +100,10 @@ public class TestNumericTerms64 extends LuceneTestCase {
       writer.addDocument(doc);
     }
     Map<String,Type> map = new HashMap<>();
-    map.put("field2", Type.LONG);
-    map.put("field4", Type.LONG);
-    map.put("field6", Type.LONG);
-    map.put("field8", Type.LONG);
+    map.put("field2", Type.LEGACY_LONG);
+    map.put("field4", Type.LEGACY_LONG);
+    map.put("field6", Type.LEGACY_LONG);
+    map.put("field8", Type.LEGACY_LONG);
     reader = UninvertingReader.wrap(writer.getReader(), map);
     searcher=newSearcher(reader);
     writer.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
index 99df329..0a1cf3d 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
@@ -363,8 +363,9 @@ public class TestUninvertingReader extends LuceneTestCase {
     iw.close();
 
     Map<String, Type> uninvertingMap = new HashMap<>();
-    uninvertingMap.put("int", Type.INTEGER);
-    uninvertingMap.put("dv", Type.INTEGER);
+    uninvertingMap.put("int", Type.LEGACY_INTEGER);
+    uninvertingMap.put("dv", Type.LEGACY_INTEGER);
+    uninvertingMap.put("dint", Type.INTEGER_POINT);
 
     DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), 
                          uninvertingMap);
@@ -376,6 +377,7 @@ public class TestUninvertingReader extends LuceneTestCase {
     assertEquals(0, intFInfo.getPointNumBytes());
 
     FieldInfo dintFInfo = leafReader.getFieldInfos().fieldInfo("dint");
+    assertEquals(DocValuesType.NUMERIC, dintFInfo.getDocValuesType());
     assertEquals(1, dintFInfo.getPointDimensionCount());
     assertEquals(4, dintFInfo.getPointNumBytes());
 

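Editor's note: the TestUninvertingReader hunk above shows the new capability this change adds. A field indexed only with IntPoint can be mapped with Type.INTEGER_POINT and then sorted on through UninvertingReader, while fields still using the old trie encoding keep working under the renamed LEGACY_* constants. The minimal standalone sketch below is not part of this patch; the class name, the field name "age", StandardAnalyzer, and RAMDirectory are illustrative assumptions only.

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.uninverting.UninvertingReader.Type;

public class SortPointsViaUninverting {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      for (int value : new int[] {42, 7, 19}) {
        Document doc = new Document();
        doc.add(new IntPoint("age", value));    // indexed as a point, no doc values
        doc.add(new StoredField("age", value)); // stored copy, only so we can print it
        writer.addDocument(doc);
      }
    }

    // INTEGER_POINT asks UninvertingReader to expose sort values built from the points
    // index; a field still indexed with LegacyIntField would use Type.LEGACY_INTEGER.
    Map<String, Type> mapping = new HashMap<>();
    mapping.put("age", Type.INTEGER_POINT);

    DirectoryReader reader = UninvertingReader.wrap(DirectoryReader.open(dir), mapping);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10,
                                 new Sort(new SortField("age", SortField.Type.INT)));
    for (ScoreDoc sd : td.scoreDocs) {
      System.out.println(searcher.doc(sd.doc).get("age")); // prints 7, 19, 42
    }
    reader.close();
    dir.close();
  }
}

The stored copy of "age" exists only so the example can print the results; the sort values themselves come from uninverting the points index, which is what Type.INTEGER_POINT requests.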
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
index 529e98b..8ccb9af 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
@@ -73,8 +73,8 @@ public abstract class SpatialTestCase extends LuceneTestCase {
     super.setUp();
     // TODO: change this module to index docvalues instead of uninverting
     uninvertMap.clear();
-    uninvertMap.put("pointvector__x", Type.DOUBLE);
-    uninvertMap.put("pointvector__y", Type.DOUBLE);
+    uninvertMap.put("pointvector__x", Type.LEGACY_DOUBLE);
+    uninvertMap.put("pointvector__y", Type.LEGACY_DOUBLE);
 
     directory = newDirectory();
     final Random random = random();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/solr/core/src/java/org/apache/solr/schema/EnumField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index e1fb420..cbf1d4e 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -194,7 +194,7 @@ public class EnumField extends PrimitiveFieldType {
     if (sf.multiValued()) {
       return Type.SORTED_SET_INTEGER;
     } else {
-      return Type.INTEGER;
+      return Type.LEGACY_INTEGER;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/solr/core/src/java/org/apache/solr/schema/TrieField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index 572bf88..c4899a1 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -203,14 +203,14 @@ public class TrieField extends PrimitiveFieldType {
     } else {
       switch (type) {
         case INTEGER:
-          return Type.INTEGER;
+          return Type.LEGACY_INTEGER;
         case LONG:
         case DATE:
-          return Type.LONG;
+          return Type.LEGACY_LONG;
         case FLOAT:
-          return Type.FLOAT;
+          return Type.LEGACY_FLOAT;
         case DOUBLE:
-          return Type.DOUBLE;
+          return Type.LEGACY_DOUBLE;
         default:
           throw new AssertionError();
       }