Posted to commits@lucene.apache.org by rm...@apache.org on 2013/02/07 21:48:28 UTC

svn commit: r1443717 [7/14] - in /lucene/dev/trunk: ./ dev-tools/ lucene/ lucene/analysis/ lucene/analysis/common/ lucene/analysis/icu/src/java/org/apache/lucene/collation/ lucene/analysis/icu/src/test/org/apache/lucene/collation/ lucene/backwards/ luc...

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java Thu Feb  7 20:48:21 2013
@@ -39,7 +39,6 @@ import org.apache.lucene.index.IndexWrit
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.NoSuchDirectoryException;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -368,57 +367,57 @@ void assertTermDocsCount(String msg,
 
   
   public void testBinaryFields() throws IOException {
-      Directory dir = newDirectory();
-      byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
+    Directory dir = newDirectory();
+    byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
       
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
       
-      for (int i = 0; i < 10; i++) {
-        addDoc(writer, "document number " + (i + 1));
-        addDocumentWithFields(writer);
-        addDocumentWithDifferentFields(writer);
-        addDocumentWithTermVectorFields(writer);
-      }
-      writer.close();
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
-      Document doc = new Document();
-      doc.add(new StoredField("bin1", bin));
-      doc.add(new TextField("junk", "junk text", Field.Store.NO));
-      writer.addDocument(doc);
-      writer.close();
-      DirectoryReader reader = DirectoryReader.open(dir);
-      StoredDocument doc2 = reader.document(reader.maxDoc() - 1);
-      StorableField[] fields = doc2.getFields("bin1");
-      assertNotNull(fields);
-      assertEquals(1, fields.length);
-      StorableField b1 = fields[0];
-      assertTrue(b1.binaryValue() != null);
-      BytesRef bytesRef = b1.binaryValue();
-      assertEquals(bin.length, bytesRef.length);
-      for (int i = 0; i < bin.length; i++) {
-        assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
-      }
-      reader.close();
-      // force merge
+    for (int i = 0; i < 10; i++) {
+      addDoc(writer, "document number " + (i + 1));
+      addDocumentWithFields(writer);
+      addDocumentWithDifferentFields(writer);
+      addDocumentWithTermVectorFields(writer);
+    }
+    writer.close();
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+    Document doc = new Document();
+    doc.add(new StoredField("bin1", bin));
+    doc.add(new TextField("junk", "junk text", Field.Store.NO));
+    writer.addDocument(doc);
+    writer.close();
+    DirectoryReader reader = DirectoryReader.open(dir);
+    StoredDocument doc2 = reader.document(reader.maxDoc() - 1);
+    StorableField[] fields = doc2.getFields("bin1");
+    assertNotNull(fields);
+    assertEquals(1, fields.length);
+    StorableField b1 = fields[0];
+    assertTrue(b1.binaryValue() != null);
+    BytesRef bytesRef = b1.binaryValue();
+    assertEquals(bin.length, bytesRef.length);
+    for (int i = 0; i < bin.length; i++) {
+      assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
+    }
+    reader.close();
+    // force merge
 
 
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
-      writer.forceMerge(1);
-      writer.close();
-      reader = DirectoryReader.open(dir);
-      doc2 = reader.document(reader.maxDoc() - 1);
-      fields = doc2.getFields("bin1");
-      assertNotNull(fields);
-      assertEquals(1, fields.length);
-      b1 = fields[0];
-      assertTrue(b1.binaryValue() != null);
-      bytesRef = b1.binaryValue();
-      assertEquals(bin.length, bytesRef.length);
-      for (int i = 0; i < bin.length; i++) {
-        assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
-      }
-      reader.close();
-      dir.close();
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+    writer.forceMerge(1);
+    writer.close();
+    reader = DirectoryReader.open(dir);
+    doc2 = reader.document(reader.maxDoc() - 1);
+    fields = doc2.getFields("bin1");
+    assertNotNull(fields);
+    assertEquals(1, fields.length);
+    b1 = fields[0];
+    assertTrue(b1.binaryValue() != null);
+    bytesRef = b1.binaryValue();
+    assertEquals(bin.length, bytesRef.length);
+    for (int i = 0; i < bin.length; i++) {
+      assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
+    }
+    reader.close();
+    dir.close();
   }
 
   /* ??? public void testOpenEmptyDirectory() throws IOException{
@@ -551,7 +550,7 @@ public void testFilesOpenClose() throws 
     assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
     assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
     assertEquals("Single segment test differs.", index1.leaves().size() == 1, index2.leaves().size() == 1);
-    
+
     // check field names
     FieldInfos fieldInfos1 = MultiFields.getMergedFieldInfos(index1);
     FieldInfos fieldInfos2 = MultiFields.getMergedFieldInfos(index2);
@@ -566,21 +565,16 @@ public void testFilesOpenClose() throws 
     // check norms
     for(FieldInfo fieldInfo : fieldInfos1) {
       String curField = fieldInfo.name;
-      DocValues norms1 = MultiDocValues.getNormDocValues(index1, curField);
-      DocValues norms2 = MultiDocValues.getNormDocValues(index2, curField);
-      if (norms1 != null && norms2 != null)
-      {
+      NumericDocValues norms1 = MultiDocValues.getNormValues(index1, curField);
+      NumericDocValues norms2 = MultiDocValues.getNormValues(index2, curField);
+      if (norms1 != null && norms2 != null) {
         // todo: generalize this (like TestDuelingCodecs assert)
-        byte[] b1 = (byte[]) norms1.getSource().getArray();
-        byte[] b2 = (byte[]) norms2.getSource().getArray();
-        assertEquals(b1.length, b2.length);
-        for (int i = 0; i < b1.length; i++) {
-          assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", b1[i], b2[i]);
+        for (int i = 0; i < index1.maxDoc(); i++) {
+          assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", norms1.get(i), norms2.get(i));
         }
-      }
-      else
-      {
-        assertSame(norms1, norms2);
+      } else {
+        assertNull(norms1);
+        assertNull(norms2);
       }
     }
     
@@ -776,9 +770,8 @@ public void testFilesOpenClose() throws 
     // Open reader1
     DirectoryReader r = DirectoryReader.open(dir);
     AtomicReader r1 = getOnlySegmentReader(r);
-    final int[] ints = FieldCache.DEFAULT.getInts(r1, "number", false);
-    assertEquals(1, ints.length);
-    assertEquals(17, ints[0]);
+    final FieldCache.Ints ints = FieldCache.DEFAULT.getInts(r1, "number", false);
+    assertEquals(17, ints.get(0));
   
     // Add new segment
     writer.addDocument(doc);
@@ -789,7 +782,7 @@ public void testFilesOpenClose() throws 
     assertNotNull(r2);
     r.close();
     AtomicReader sub0 = r2.leaves().get(0).reader();
-    final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number", false);
+    final FieldCache.Ints ints2 = FieldCache.DEFAULT.getInts(sub0, "number", false);
     r2.close();
     assertTrue(ints == ints2);
   

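For reference, the norms hunk above replaces the old array-backed DocValues source with the per-document NumericDocValues accessor returned by MultiDocValues.getNormValues. A minimal standalone sketch of that read path, not taken from this commit (the class, reader, and field names below are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.MultiDocValues;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.store.Directory;

    class NormsReadSketch {
      // Prints the norm of every document for one field, using the accessor
      // the test now relies on instead of getSource().getArray().
      static void printNorms(Directory dir, String field) throws IOException {
        DirectoryReader reader = DirectoryReader.open(dir);
        try {
          NumericDocValues norms = MultiDocValues.getNormValues(reader, field);
          if (norms == null) {
            return;  // field is not indexed or omits norms
          }
          for (int docID = 0; docID < reader.maxDoc(); docID++) {
            System.out.println(docID + " -> " + norms.get(docID));  // one long per doc
          }
        } finally {
          reader.close();
        }
      }
    }

As the hunk shows, callers now compare values document by document instead of reaching into a backing byte[].
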
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java Thu Feb  7 20:48:21 2013
@@ -16,7 +16,6 @@ package org.apache.lucene.index;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -38,7 +37,6 @@ import org.apache.lucene.search.IndexSea
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java Thu Feb  7 20:48:21 2013
@@ -32,7 +32,6 @@ import org.apache.lucene.document.IntFie
 import org.apache.lucene.index.DocTermOrds.TermOrdsIterator;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
@@ -303,7 +302,7 @@ public class TestDocTermOrds extends Luc
                                             _TestUtil.nextInt(random(), 2, 10));
                                             
 
-    final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
+    final FieldCache.Ints docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
     /*
       for(int docID=0;docID<subR.maxDoc();docID++) {
       System.out.println("  docID=" + docID + " id=" + docIDToID[docID]);
@@ -357,10 +356,10 @@ public class TestDocTermOrds extends Luc
     final int[] buffer = new int[5];
     for(int docID=0;docID<r.maxDoc();docID++) {
       if (VERBOSE) {
-        System.out.println("TEST: docID=" + docID + " of " + r.maxDoc() + " (id=" + docIDToID[docID] + ")");
+        System.out.println("TEST: docID=" + docID + " of " + r.maxDoc() + " (id=" + docIDToID.get(docID) + ")");
       }
       iter = dto.lookup(docID, iter);
-      final int[] answers = idToOrds[docIDToID[docID]];
+      final int[] answers = idToOrds[docIDToID.get(docID)];
       int upto = 0;
       while(true) {
         final int chunk = iter.read(buffer);

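The TestDocTermOrds hunk above swaps the raw int[] returned by the old FieldCache API for a FieldCache.Ints view. A minimal sketch of using that view, not from the commit (reader, field, and method names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.search.FieldCache;

    class FieldCacheIntsSketch {
      // Sums an int field across all documents of one segment reader.
      static long sumInts(AtomicReader reader, String field) throws IOException {
        // trailing 'false' matches the calls in the hunk above
        FieldCache.Ints values = FieldCache.DEFAULT.getInts(reader, field, false);
        long sum = 0;
        for (int docID = 0; docID < reader.maxDoc(); docID++) {
          sum += values.get(docID);  // random access; no array is exposed
        }
        return sum;
      }
    }
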
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java Thu Feb  7 20:48:21 2013
@@ -16,57 +16,28 @@ package org.apache.lucene.index;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import java.io.Closeable;
+
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Random;
-import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.ByteDocValuesField;
-import org.apache.lucene.document.DerefBytesDocValuesField;
+import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.DoubleDocValuesField;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FloatDocValuesField;
-import org.apache.lucene.document.IntDocValuesField;
-import org.apache.lucene.document.LongDocValuesField;
-import org.apache.lucene.document.PackedLongDocValuesField;
-import org.apache.lucene.document.ShortDocValuesField;
-import org.apache.lucene.document.SortedBytesDocValuesField;
-import org.apache.lucene.document.StraightBytesDocValuesField;
-import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.DocValues.SortedSource;
-import org.apache.lucene.index.DocValues.Source;
-import org.apache.lucene.index.DocValues.SourceCache;
-import org.apache.lucene.index.DocValues.Type;
-import org.apache.lucene.index.DocValues.SourceCache.DirectSourceCache;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefHash;
-import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
 
 /**
  * 
- * Tests DocValues integration into IndexWriter & Codecs
+ * Tests DocValues integration into IndexWriter
  * 
  */
 public class TestDocValuesIndexing extends LuceneTestCase {
@@ -75,70 +46,12 @@ public class TestDocValuesIndexing exten
    * - add multithreaded tests / integrate into stress indexing?
    */
   
-  /*
-   * Simple test case to show how to use the API
-   */
-  public void testDocValuesSimple() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, writerConfig(false));
-    for (int i = 0; i < 5; i++) {
-      Document doc = new Document();
-      doc.add(new PackedLongDocValuesField("docId", i));
-      doc.add(new TextField("docId", "" + i, Field.Store.NO));
-      writer.addDocument(doc);
-    }
-    writer.commit();
-    writer.forceMerge(1, true);
-
-    writer.close(true);
-
-    DirectoryReader reader = DirectoryReader.open(dir, 1);
-    assertEquals(1, reader.leaves().size());
-
-    IndexSearcher searcher = new IndexSearcher(reader);
-
-    BooleanQuery query = new BooleanQuery();
-    query.add(new TermQuery(new Term("docId", "0")), BooleanClause.Occur.SHOULD);
-    query.add(new TermQuery(new Term("docId", "1")), BooleanClause.Occur.SHOULD);
-    query.add(new TermQuery(new Term("docId", "2")), BooleanClause.Occur.SHOULD);
-    query.add(new TermQuery(new Term("docId", "3")), BooleanClause.Occur.SHOULD);
-    query.add(new TermQuery(new Term("docId", "4")), BooleanClause.Occur.SHOULD);
-
-    TopDocs search = searcher.search(query, 10);
-    assertEquals(5, search.totalHits);
-    ScoreDoc[] scoreDocs = search.scoreDocs;
-    DocValues docValues = MultiDocValues.getDocValues(reader, "docId");
-    Source source = docValues.getSource();
-    for (int i = 0; i < scoreDocs.length; i++) {
-      assertEquals(i, scoreDocs[i].doc);
-      assertEquals(i, source.getInt(scoreDocs[i].doc));
-    }
-    reader.close();
-    dir.close();
-  }
-
-  public void testIndexBytesNoDeletes() throws IOException {
-    runTestIndexBytes(writerConfig(random().nextBoolean()), false);
-  }
-
-  public void testIndexBytesDeletes() throws IOException {
-    runTestIndexBytes(writerConfig(random().nextBoolean()), true);
-  }
-
-  public void testIndexNumericsNoDeletes() throws IOException {
-    runTestNumerics(writerConfig(random().nextBoolean()), false);
-  }
-
-  public void testIndexNumericsDeletes() throws IOException {
-    runTestNumerics(writerConfig(random().nextBoolean()), true);
-  }
-
   public void testAddIndexes() throws IOException {
     Directory d1 = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d1);
     Document doc = new Document();
     doc.add(newStringField("id", "1", Field.Store.YES));
-    doc.add(new PackedLongDocValuesField("dv", 1));
+    doc.add(new NumericDocValuesField("dv", 1));
     w.addDocument(doc);
     IndexReader r1 = w.getReader();
     w.close();
@@ -147,7 +60,7 @@ public class TestDocValuesIndexing exten
     w = new RandomIndexWriter(random(), d2);
     doc = new Document();
     doc.add(newStringField("id", "2", Field.Store.YES));
-    doc.add(new PackedLongDocValuesField("dv", 2));
+    doc.add(new NumericDocValuesField("dv", 2));
     w.addDocument(doc);
     IndexReader r2 = w.getReader();
     w.close();
@@ -165,601 +78,17 @@ public class TestDocValuesIndexing exten
     w.close();
     AtomicReader sr = getOnlySegmentReader(r3);
     assertEquals(2, sr.numDocs());
-    DocValues docValues = sr.docValues("dv");
+    NumericDocValues docValues = sr.getNumericDocValues("dv");
     assertNotNull(docValues);
     r3.close();
     d3.close();
   }
 
-  public void testAddIndexesRandom() throws IOException {
-    int valuesPerIndex = 10;
-    List<Type> values = Arrays.asList(Type.values());
-    Collections.shuffle(values, random());
-    Type first = values.get(0);
-    Type second = values.get(1);
-    // index first index
-    Directory d_1 = newDirectory();
-    IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random().nextBoolean()));
-    indexValues(w_1, valuesPerIndex, first, values, false, 7);
-    w_1.commit();
-    assertEquals(valuesPerIndex, w_1.maxDoc());
-    _TestUtil.checkIndex(d_1);
-
-    // index second index
-    Directory d_2 = newDirectory();
-    IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random().nextBoolean()));
-    indexValues(w_2, valuesPerIndex, second, values, false, 7);
-    w_2.commit();
-    assertEquals(valuesPerIndex, w_2.maxDoc());
-    _TestUtil.checkIndex(d_2);
-
-    Directory target = newDirectory();
-    IndexWriter w = new IndexWriter(target, writerConfig(random().nextBoolean()));
-    DirectoryReader r_1 = DirectoryReader.open(w_1, true);
-    DirectoryReader r_2 = DirectoryReader.open(w_2, true);
-    if (random().nextBoolean()) {
-      w.addIndexes(d_1, d_2);
-    } else {
-      w.addIndexes(r_1, r_2);
-    }
-    w.forceMerge(1, true);
-    w.commit();
-    
-    _TestUtil.checkIndex(target);
-    assertEquals(valuesPerIndex * 2, w.maxDoc());
-
-    // check values
-    
-    DirectoryReader merged = DirectoryReader.open(w, true);
-    Source source_1 = getSource(getDocValues(r_1, first.name()));
-    Source source_2 = getSource(getDocValues(r_2, second.name()));
-    Source source_1_merged = getSource(getDocValues(merged, first.name()));
-    Source source_2_merged = getSource(getDocValues(merged, second
-        .name()));
-    for (int i = 0; i < r_1.maxDoc(); i++) {
-      switch (first) {
-      case BYTES_FIXED_DEREF:
-      case BYTES_FIXED_STRAIGHT:
-      case BYTES_VAR_DEREF:
-      case BYTES_VAR_STRAIGHT:
-      case BYTES_FIXED_SORTED:
-      case BYTES_VAR_SORTED:
-        assertEquals(source_1.getBytes(i, new BytesRef()),
-            source_1_merged.getBytes(i, new BytesRef()));
-        break;
-      case FIXED_INTS_16:
-      case FIXED_INTS_32:
-      case FIXED_INTS_64:
-      case FIXED_INTS_8:
-      case VAR_INTS:
-        assertEquals(source_1.getInt(i), source_1_merged.getInt(i));
-        break;
-      case FLOAT_32:
-      case FLOAT_64:
-        assertEquals(source_1.getFloat(i), source_1_merged.getFloat(i), 0.0d);
-        break;
-      default:
-        fail("unkonwn " + first);
-      }
-    }
-
-    for (int i = r_1.maxDoc(); i < merged.maxDoc(); i++) {
-      switch (second) {
-      case BYTES_FIXED_DEREF:
-      case BYTES_FIXED_STRAIGHT:
-      case BYTES_VAR_DEREF:
-      case BYTES_VAR_STRAIGHT:
-      case BYTES_FIXED_SORTED:
-      case BYTES_VAR_SORTED:
-        assertEquals(source_2.getBytes(i - r_1.maxDoc(), new BytesRef()),
-            source_2_merged.getBytes(i, new BytesRef()));
-        break;
-      case FIXED_INTS_16:
-      case FIXED_INTS_32:
-      case FIXED_INTS_64:
-      case FIXED_INTS_8:
-      case VAR_INTS:
-        assertEquals(source_2.getInt(i - r_1.maxDoc()),
-            source_2_merged.getInt(i));
-        break;
-      case FLOAT_32:
-      case FLOAT_64:
-        assertEquals(source_2.getFloat(i - r_1.maxDoc()),
-            source_2_merged.getFloat(i), 0.0d);
-        break;
-      default:
-        fail("unkonwn " + first);
-      }
-    }
-    // close resources
-    r_1.close();
-    r_2.close();
-    merged.close();
-    w_1.close(true);
-    w_2.close(true);
-    w.close(true);
-    d_1.close();
-    d_2.close();
-    target.close();
-  }
-
-  private IndexWriterConfig writerConfig(boolean useCompoundFile) {
-    final IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random()));
-    cfg.setMergePolicy(newLogMergePolicy(random()));
-    LogMergePolicy policy = new LogDocMergePolicy();
-    cfg.setMergePolicy(policy);
-    policy.setUseCompoundFile(useCompoundFile);
-    return cfg;
-  }
-
-  @SuppressWarnings("fallthrough")
-  public void runTestNumerics(IndexWriterConfig cfg, boolean withDeletions)
-      throws IOException {
-    Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, cfg);
-    final int numValues = 50 + atLeast(10);
-    final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
-
-    // run in random order to test if fill works correctly during merges
-    Collections.shuffle(numVariantList, random());
-    for (Type val : numVariantList) {
-      FixedBitSet deleted = indexValues(w, numValues, val, numVariantList,
-          withDeletions, 7);
-      List<Closeable> closeables = new ArrayList<Closeable>();
-      DirectoryReader r = DirectoryReader.open(w, true);
-      final int numRemainingValues = numValues - deleted.cardinality();
-      final int base = r.numDocs() - numRemainingValues;
-      // for FIXED_INTS_8 we use value mod 128 - to enable testing in 
-      // one go we simply use numValues as the mod for all other INT types
-      int mod = numValues;
-      switch (val) {
-      case FIXED_INTS_8:
-        mod = 128;
-      case FIXED_INTS_16:
-      case FIXED_INTS_32:
-      case FIXED_INTS_64:
-      case VAR_INTS: {
-        DocValues intsReader = getDocValues(r, val.name());
-        assertNotNull(intsReader);
-
-        Source ints = getSource(intsReader);
-
-        for (int i = 0; i < base; i++) {
-          long value = ints.getInt(i);
-          assertEquals("index " + i, 0, value);
-        }
-
-        int expected = 0;
-        for (int i = base; i < r.numDocs(); i++, expected++) {
-          while (deleted.get(expected)) {
-            expected++;
-          }
-          assertEquals(val + " mod: " + mod + " index: " +  i, expected%mod, ints.getInt(i));
-        }
-      }
-        break;
-      case FLOAT_32:
-      case FLOAT_64: {
-        DocValues floatReader = getDocValues(r, val.name());
-        assertNotNull(floatReader);
-        Source floats = getSource(floatReader);
-        for (int i = 0; i < base; i++) {
-          double value = floats.getFloat(i);
-          assertEquals(val + " failed for doc: " + i + " base: " + base,
-              0.0d, value, 0.0d);
-        }
-        int expected = 0;
-        for (int i = base; i < r.numDocs(); i++, expected++) {
-          while (deleted.get(expected)) {
-            expected++;
-          }
-          assertEquals("index " + i, 2.0 * expected, floats.getFloat(i),
-              0.00001);
-        }
-      }
-        break;
-      default:
-        fail("unexpected value " + val);
-      }
-
-      closeables.add(r);
-      for (Closeable toClose : closeables) {
-        toClose.close();
-      }
-    }
-    w.close();
-    d.close();
-  }
-  
-  public void runTestIndexBytes(IndexWriterConfig cfg, boolean withDeletions)
-      throws IOException {
-    final Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, cfg);
-    final List<Type> byteVariantList = new ArrayList<Type>(BYTES);
-    // run in random order to test if fill works correctly during merges
-    Collections.shuffle(byteVariantList, random());
-    final int numValues = 50 + atLeast(10);
-    for (Type byteIndexValue : byteVariantList) {
-      List<Closeable> closeables = new ArrayList<Closeable>();
-      final int bytesSize = 1 + atLeast(50);
-      FixedBitSet deleted = indexValues(w, numValues, byteIndexValue,
-          byteVariantList, withDeletions, bytesSize);
-      final DirectoryReader r = DirectoryReader.open(w, withDeletions);
-      assertEquals(0, r.numDeletedDocs());
-      final int numRemainingValues = numValues - deleted.cardinality();
-      final int base = r.numDocs() - numRemainingValues;
-      DocValues bytesReader = getDocValues(r, byteIndexValue.name());
-      assertNotNull("field " + byteIndexValue.name()
-          + " returned null reader - maybe merged failed", bytesReader);
-      Source bytes = getSource(bytesReader);
-      byte upto = 0;
-
-      // test the filled up slots for correctness
-      for (int i = 0; i < base; i++) {
-
-        BytesRef br = bytes.getBytes(i, new BytesRef());
-        String msg = " field: " + byteIndexValue.name() + " at index: " + i
-            + " base: " + base + " numDocs:" + r.numDocs();
-        switch (byteIndexValue) {
-        case BYTES_VAR_STRAIGHT:
-        case BYTES_FIXED_STRAIGHT:
-        case BYTES_FIXED_DEREF:
-        case BYTES_FIXED_SORTED:
-          // fixed straight returns bytesref with zero bytes all of fixed
-          // length
-          assertNotNull("expected none null - " + msg, br);
-          if (br.length != 0) {
-            assertEquals("expected zero bytes of length " + bytesSize + " - "
-                + msg + br.utf8ToString(), bytesSize, br.length);
-            for (int j = 0; j < br.length; j++) {
-              assertEquals("Byte at index " + j + " doesn't match - " + msg, 0,
-                  br.bytes[br.offset + j]);
-            }
-          }
-          break;
-        default:
-          assertNotNull("expected none null - " + msg, br);
-          assertEquals(byteIndexValue + "", 0, br.length);
-          // make sure we advance at least until base
-        }
-      }
-
-      // test the actual doc values added in this iteration
-      assertEquals(base + numRemainingValues, r.numDocs());
-      int v = 0;
-      for (int i = base; i < r.numDocs(); i++) {
-        String msg = " field: " + byteIndexValue.name() + " at index: " + i
-            + " base: " + base + " numDocs:" + r.numDocs() + " bytesSize: "
-            + bytesSize + " src: " + bytes;
-        while (withDeletions && deleted.get(v++)) {
-          upto += bytesSize;
-        }
-        BytesRef br = bytes.getBytes(i, new BytesRef());
-        assertTrue(msg, br.length > 0);
-        for (int j = 0; j < br.length; j++, upto++) {
-          if (!(br.bytes.length > br.offset + j))
-            br = bytes.getBytes(i, new BytesRef());
-          assertTrue("BytesRef index exceeded [" + msg + "] offset: "
-              + br.offset + " length: " + br.length + " index: "
-              + (br.offset + j), br.bytes.length > br.offset + j);
-          assertEquals("SourceRef Byte at index " + j + " doesn't match - "
-              + msg, upto, br.bytes[br.offset + j]);
-        }
-      }
-
-      // clean up
-      closeables.add(r);
-      for (Closeable toClose : closeables) {
-        toClose.close();
-      }
-    }
-
-    w.close();
-    d.close();
-  }
-  
-  public void testGetArrayNumerics() throws IOException {
-    Directory d = newDirectory();
-    IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    IndexWriter w = new IndexWriter(d, cfg);
-    final int numValues = 50 + atLeast(10);
-    final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
-    Collections.shuffle(numVariantList, random());
-    for (Type val : numVariantList) {
-      indexValues(w, numValues, val, numVariantList,
-          false, 7);
-      DirectoryReader r = DirectoryReader.open(w, true);
-      DocValues docValues = getDocValues(r, val.name());
-      assertNotNull(docValues);
-      // make sure we don't get a direct source since they don't support getArray()
-      Source source = docValues.getSource();
-      switch (source.getType()) {
-      case FIXED_INTS_8:
-      {
-        assertTrue(source.hasArray());
-        byte[] values = (byte[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          assertEquals((long)values[i], source.getInt(i));
-        }
-      }
-      break;
-      case FIXED_INTS_16:
-      {
-        assertTrue(source.hasArray());
-        short[] values = (short[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          assertEquals((long)values[i], source.getInt(i));
-        }
-      }
-      break;
-      case FIXED_INTS_32:
-      {
-        assertTrue(source.hasArray());
-        int[] values = (int[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          assertEquals((long)values[i], source.getInt(i));
-        }
-      }
-      break;
-      case FIXED_INTS_64:
-      {
-        assertTrue(source.hasArray());
-        long[] values = (long[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          assertEquals(values[i], source.getInt(i));
-        }
-      }
-      break;
-      case VAR_INTS:
-        assertFalse(source.hasArray());
-        break;
-      case FLOAT_32:
-      {
-        assertTrue(source.hasArray());
-        float[] values = (float[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          assertEquals((double)values[i], source.getFloat(i), 0.0d);
-        }
-      }
-      break;
-      case FLOAT_64:
-      {
-        assertTrue(source.hasArray());
-        double[] values = (double[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          assertEquals(values[i], source.getFloat(i), 0.0d);
-        }
-      }
-        break;
-      default:
-        fail("unexpected value " + source.getType());
-      }
-      r.close();
-    }
-    w.close();
-    d.close();
-  }
-  
-  public void testGetArrayBytes() throws IOException {
-    Directory d = newDirectory();
-    IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random()));
-    IndexWriter w = new IndexWriter(d, cfg);
-    final int numValues = 50 + atLeast(10);
-    // only single byte fixed straight supports getArray()
-    indexValues(w, numValues, Type.BYTES_FIXED_STRAIGHT, null, false, 1);
-    DirectoryReader r = DirectoryReader.open(w, true);
-    DocValues docValues = getDocValues(r, Type.BYTES_FIXED_STRAIGHT.name());
-    assertNotNull(docValues);
-    // make sure we don't get a direct source since they don't support
-    // getArray()
-    Source source = docValues.getSource();
-
-    switch (source.getType()) {
-    case BYTES_FIXED_STRAIGHT: {
-      BytesRef ref = new BytesRef();
-      if (source.hasArray()) {
-        byte[] values = (byte[]) source.getArray();
-        for (int i = 0; i < numValues; i++) {
-          source.getBytes(i, ref);
-          assertEquals(1, ref.length);
-          assertEquals(values[i], ref.bytes[ref.offset]);
-        }
-      }
-    }
-      break;
-    default:
-      fail("unexpected value " + source.getType());
-    }
-    r.close();
-    w.close();
-    d.close();
-  }
-
-  private DocValues getDocValues(IndexReader reader, String field) throws IOException {
-    final DocValues docValues = MultiDocValues.getDocValues(reader, field);
-    if (docValues == null) {
-      return docValues;
-    }
-    if (rarely()) {
-      docValues.setCache(new NotCachingSourceCache());
-    } else {
-      if (!(docValues.getCache() instanceof DirectSourceCache))  {
-        docValues.setCache(new DirectSourceCache());
-      }
-    }
-    return docValues;
-    }
-
-  @SuppressWarnings("fallthrough")
-  private Source getSource(DocValues values) throws IOException {
-    // getSource uses cache internally
-    switch(random().nextInt(5)) {
-    case 3:
-      return values.loadSource();
-    case 2:
-      return values.getDirectSource();
-    case 1:
-      if(values.getType() == Type.BYTES_VAR_SORTED || values.getType() == Type.BYTES_FIXED_SORTED) {
-        return values.getSource().asSortedSource();
-      }
-    default:
-      return values.getSource();
-    }
-  }
-
-
-  private static EnumSet<Type> BYTES = EnumSet.of(Type.BYTES_FIXED_DEREF,
-      Type.BYTES_FIXED_STRAIGHT, Type.BYTES_VAR_DEREF,
-      Type.BYTES_VAR_STRAIGHT, Type.BYTES_FIXED_SORTED, Type.BYTES_VAR_SORTED);
-
-  private static EnumSet<Type> NUMERICS = EnumSet.of(Type.VAR_INTS,
-      Type.FIXED_INTS_16, Type.FIXED_INTS_32,
-      Type.FIXED_INTS_64, 
-      Type.FIXED_INTS_8,
-      Type.FLOAT_32,
-      Type.FLOAT_64);
-
-  private FixedBitSet indexValues(IndexWriter w, int numValues, Type valueType,
-      List<Type> valueVarList, boolean withDeletions, int bytesSize)
-      throws IOException {
-    final boolean isNumeric = NUMERICS.contains(valueType);
-    FixedBitSet deleted = new FixedBitSet(numValues);
-    Document doc = new Document();
-    final Field valField;
-    if (isNumeric) {
-      switch (valueType) {
-      case VAR_INTS:
-        valField = new PackedLongDocValuesField(valueType.name(), (long) 0);
-        break;
-      case FIXED_INTS_16:
-        valField = new ShortDocValuesField(valueType.name(), (short) 0);
-        break;
-      case FIXED_INTS_32:
-        valField = new IntDocValuesField(valueType.name(), 0);
-        break;
-      case FIXED_INTS_64:
-        valField = new LongDocValuesField(valueType.name(), (long) 0);
-        break;
-      case FIXED_INTS_8:
-        valField = new ByteDocValuesField(valueType.name(), (byte) 0);
-        break;
-      case FLOAT_32:
-        valField = new FloatDocValuesField(valueType.name(), (float) 0);
-        break;
-      case FLOAT_64:
-        valField = new DoubleDocValuesField(valueType.name(), (double) 0);
-        break;
-      default:
-        valField = null;
-        fail("unhandled case");
-      }
-    } else {
-      switch (valueType) {
-      case BYTES_FIXED_STRAIGHT:
-        valField = new StraightBytesDocValuesField(valueType.name(), new BytesRef(), true);
-        break;
-      case BYTES_VAR_STRAIGHT:
-        valField = new StraightBytesDocValuesField(valueType.name(), new BytesRef(), false);
-        break;
-      case BYTES_FIXED_DEREF:
-        valField = new DerefBytesDocValuesField(valueType.name(), new BytesRef(), true);
-        break;
-      case BYTES_VAR_DEREF:
-        valField = new DerefBytesDocValuesField(valueType.name(), new BytesRef(), false);
-        break;
-      case BYTES_FIXED_SORTED:
-        valField = new SortedBytesDocValuesField(valueType.name(), new BytesRef(), true);
-        break;
-      case BYTES_VAR_SORTED:
-        valField = new SortedBytesDocValuesField(valueType.name(), new BytesRef(), false);
-        break;
-      default:
-        valField = null;
-        fail("unhandled case");
-      }
-    }
-    doc.add(valField);
-    final BytesRef bytesRef = new BytesRef();
-
-    final String idBase = valueType.name() + "_";
-    final byte[] b = new byte[bytesSize];
-    if (bytesRef != null) {
-      bytesRef.bytes = b;
-      bytesRef.length = b.length;
-      bytesRef.offset = 0;
-    }
-    byte upto = 0;
-    for (int i = 0; i < numValues; i++) {
-      if (isNumeric) {
-        switch (valueType) {
-        case VAR_INTS:
-          valField.setLongValue((long)i);
-          break;
-        case FIXED_INTS_16:
-          valField.setShortValue((short)i);
-          break;
-        case FIXED_INTS_32:
-          valField.setIntValue(i);
-          break;
-        case FIXED_INTS_64:
-          valField.setLongValue((long)i);
-          break;
-        case FIXED_INTS_8:
-          valField.setByteValue((byte)(0xFF & (i % 128)));
-          break;
-        case FLOAT_32:
-          valField.setFloatValue(2.0f * i);
-          break;
-        case FLOAT_64:
-          valField.setDoubleValue(2.0d * i);
-          break;
-        default:
-          fail("unexpected value " + valueType);
-        }
-      } else {
-        for (int j = 0; j < b.length; j++) {
-          b[j] = upto++;
-        }
-        if (bytesRef != null) {
-          valField.setBytesValue(bytesRef);
-        }
-      }
-      doc.removeFields("id");
-      doc.add(new StringField("id", idBase + i, Field.Store.YES));
-      w.addDocument(doc);
-
-      if (i % 7 == 0) {
-        if (withDeletions && random().nextBoolean()) {
-          Type val = valueVarList.get(random().nextInt(1 + valueVarList
-              .indexOf(valueType)));
-          final int randInt = val == valueType ? random().nextInt(1 + i) : random()
-              .nextInt(numValues);
-          w.deleteDocuments(new Term("id", val.name() + "_" + randInt));
-          if (val == valueType) {
-            deleted.set(randInt);
-          }
-        }
-        if (random().nextInt(10) == 0) {
-          w.commit();
-        }
-      }
-    }
-    w.commit();
-
-    // TODO test multi seg with deletions
-    if (withDeletions || random().nextBoolean()) {
-      w.forceMerge(1, true);
-    }
-    return deleted;
-  }
-
   public void testMultiValuedDocValuesField() throws Exception {
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     Document doc = new Document();
-    Field f = new PackedLongDocValuesField("field", 17);
+    Field f = new NumericDocValuesField("field", 17);
     // Index doc values are single-valued so we should not
     // be able to add same field more than once:
     doc.add(f);
@@ -777,7 +106,7 @@ public class TestDocValuesIndexing exten
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
     w.close();
-    assertEquals(17, getOnlySegmentReader(r).docValues("field").loadSource().getInt(0));
+    assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
     r.close();
     d.close();
   }
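
The hunks above replace PackedLongDocValuesField with NumericDocValuesField and read the value back through FieldCache. A minimal indexing sketch with the new field type, not from the commit (writer and field names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.NumericDocValuesField;
    import org.apache.lucene.index.IndexWriter;

    class NumericDVIndexSketch {
      // Adds one document carrying a single numeric doc value.
      static void addPrice(IndexWriter writer, long price) throws IOException {
        Document doc = new Document();
        // doc values are single-valued: adding "price" twice to the same
        // document makes addDocument throw IllegalArgumentException
        doc.add(new NumericDocValuesField("price", price));
        writer.addDocument(doc);
      }
    }
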
@@ -789,8 +118,8 @@ public class TestDocValuesIndexing exten
     // Index doc values are single-valued so we should not
     // be able to add same field more than once:
     Field f;
-    doc.add(f = new PackedLongDocValuesField("field", 17));
-    doc.add(new FloatDocValuesField("field", 22.0f));
+    doc.add(f = new NumericDocValuesField("field", 17));
+    doc.add(new BinaryDocValuesField("field", new BytesRef("blah")));
     try {
       w.addDocument(doc);
       fail("didn't hit expected exception");
@@ -804,208 +133,35 @@ public class TestDocValuesIndexing exten
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
     w.close();
-    assertEquals(17, getOnlySegmentReader(r).docValues("field").loadSource().getInt(0));
+    assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
     r.close();
     d.close();
   }
-  
-  public void testSortedBytes() throws IOException {
-    Type[] types = new Type[] { Type.BYTES_FIXED_SORTED, Type.BYTES_VAR_SORTED };
-    for (Type type : types) {
-      boolean fixed = type == Type.BYTES_FIXED_SORTED;
-      final Directory d = newDirectory();
-      IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer(random()));
-      IndexWriter w = new IndexWriter(d, cfg);
-      int numDocs = atLeast(100);
-      BytesRefHash hash = new BytesRefHash();
-      Map<String, String> docToString = new HashMap<String, String>();
-      int len = 1 + random().nextInt(50);
-      for (int i = 0; i < numDocs; i++) {
-        Document doc = new Document();
-        doc.add(newTextField("id", "" + i, Field.Store.YES));
-        String string = fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random(),
-            len) : _TestUtil.randomRealisticUnicodeString(random(), 1, len);
-        BytesRef br = new BytesRef(string);
-        doc.add(new SortedBytesDocValuesField("field", br, type == Type.BYTES_FIXED_SORTED));
-        hash.add(br);
-        docToString.put("" + i, string);
-        w.addDocument(doc);
-      }
-      if (rarely()) {
-        w.commit();
-      }
-      int numDocsNoValue = atLeast(10);
-      for (int i = 0; i < numDocsNoValue; i++) {
-        Document doc = new Document();
-        doc.add(newTextField("id", "noValue", Field.Store.YES));
-        w.addDocument(doc);
-      }
-      BytesRef bytesRef = new BytesRef(fixed ? len : 0);
-      bytesRef.offset = 0;
-      bytesRef.length = fixed ? len : 0;
-      hash.add(bytesRef); // add empty value for the gaps
-      if (rarely()) {
-        w.commit();
-      }
-      for (int i = 0; i < numDocs; i++) {
-        Document doc = new Document();
-        String id = "" + i + numDocs;
-        doc.add(newTextField("id", id, Field.Store.YES));
-        String string = fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random(),
-            len) : _TestUtil.randomRealisticUnicodeString(random(), 1, len);
-        BytesRef br = new BytesRef(string);
-        hash.add(br);
-        docToString.put(id, string);
-        doc.add(new SortedBytesDocValuesField("field", br, type == Type.BYTES_FIXED_SORTED));
-        w.addDocument(doc);
-      }
-      w.commit();
-      IndexReader reader = w.getReader();
-      DocValues docValues = MultiDocValues.getDocValues(reader, "field");
-      Source source = getSource(docValues);
-      SortedSource asSortedSource = source.asSortedSource();
-      int[] sort = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
-      BytesRef expected = new BytesRef();
-      BytesRef actual = new BytesRef();
-      assertEquals(hash.size(), asSortedSource.getValueCount());
-      for (int i = 0; i < hash.size(); i++) {
-        hash.get(sort[i], expected);
-        asSortedSource.getByOrd(i, actual);
-        assertEquals(expected.utf8ToString(), actual.utf8ToString());
-        int ord = asSortedSource.getOrdByValue(expected, actual);
-        assertEquals(i, ord);
-      }
-      AtomicReader slowR = SlowCompositeReaderWrapper.wrap(reader);
-      Set<Entry<String, String>> entrySet = docToString.entrySet();
-
-      for (Entry<String, String> entry : entrySet) {
-        int docId = docId(slowR, new Term("id", entry.getKey()));
-        expected = new BytesRef(entry.getValue());
-        assertEquals(expected, asSortedSource.getBytes(docId, actual));
-      }
-
-      reader.close();
-      w.close();
-      d.close();
-    }
-  }
-  
-  public int docId(AtomicReader reader, Term term) throws IOException {
-    int docFreq = reader.docFreq(term);
-    assertEquals(1, docFreq);
-    DocsEnum termDocsEnum = reader.termDocsEnum(term);
-    int nextDoc = termDocsEnum.nextDoc();
-    assertEquals(DocIdSetIterator.NO_MORE_DOCS, termDocsEnum.nextDoc());
-    return nextDoc;
-  }
-
-  public void testWithThreads() throws Exception {
-    Random random = random();
-    final int NUM_DOCS = atLeast(100);
-    final Directory dir = newDirectory();
-    final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
-    final boolean allowDups = random.nextBoolean();
-    final Set<String> seen = new HashSet<String>();
-    if (VERBOSE) {
-      System.out.println("TEST: NUM_DOCS=" + NUM_DOCS + " allowDups=" + allowDups);
-    }
-    int numDocs = 0;
-    final List<BytesRef> docValues = new ArrayList<BytesRef>();
-
-    // TODO: deletions
-    while (numDocs < NUM_DOCS) {
-      final String s;
-      if (random.nextBoolean()) {
-        s = _TestUtil.randomSimpleString(random);
-      } else {
-        s = _TestUtil.randomUnicodeString(random);
-      }
-      final BytesRef br = new BytesRef(s);
-
-      if (!allowDups) {
-        if (seen.contains(s)) {
-          continue;
-        }
-        seen.add(s);
-      }
-
-      if (VERBOSE) {
-        System.out.println("  " + numDocs + ": s=" + s);
-      }
-      
-      final Document doc = new Document();
-      doc.add(new SortedBytesDocValuesField("stringdv", br));
-      doc.add(new PackedLongDocValuesField("id", numDocs));
-      docValues.add(br);
-      writer.addDocument(doc);
-      numDocs++;
-
-      if (random.nextInt(40) == 17) {
-        // force flush
-        writer.getReader().close();
-      }
-    }
-
-    writer.forceMerge(1);
-    final DirectoryReader r = writer.getReader();
-    writer.close();
-    
-    final AtomicReader sr = getOnlySegmentReader(r);
-    final DocValues dv = sr.docValues("stringdv");
-    assertNotNull(dv);
-
-    final long END_TIME = System.currentTimeMillis() + (TEST_NIGHTLY ? 30 : 1);
-
-    final DocValues.Source docIDToID = sr.docValues("id").getSource();
-
-    final int NUM_THREADS = _TestUtil.nextInt(random(), 1, 10);
-    Thread[] threads = new Thread[NUM_THREADS];
-    for(int thread=0;thread<NUM_THREADS;thread++) {
-      threads[thread] = new Thread() {
-          @Override
-          public void run() {
-            Random random = random();            
-            final DocValues.Source stringDVSource;
-            final DocValues.Source stringDVDirectSource;
-            try {
-              stringDVSource = dv.getSource();
-              assertNotNull(stringDVSource);
-              stringDVDirectSource = dv.getDirectSource();
-              assertNotNull(stringDVDirectSource);
-            } catch (IOException ioe) {
-              throw new RuntimeException(ioe);
-            }
-            while(System.currentTimeMillis() < END_TIME) {
-              final DocValues.Source source;
-              if (random.nextBoolean()) {
-                source = stringDVSource;
-              } else {
-                source = stringDVDirectSource;
-              }
-
-              final DocValues.SortedSource sortedSource = source.asSortedSource();
-              assertNotNull(sortedSource);
-
-              final BytesRef scratch = new BytesRef();
-
-              for(int iter=0;iter<100;iter++) {
-                final int docID = random.nextInt(sr.maxDoc());
-                final BytesRef br = sortedSource.getBytes(docID, scratch);
-                assertEquals(docValues.get((int) docIDToID.getInt(docID)), br);
-              }
-            }
-          }
-        };
-      threads[thread].start();
-    }
 
-    for(Thread thread : threads) {
-      thread.join();
+  public void testDifferentTypedDocValuesField2() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // Index doc values are single-valued so we should not
+    // be able to add same field more than once:
+    Field f = new NumericDocValuesField("field", 17);
+    doc.add(f);
+    doc.add(new SortedDocValuesField("field", new BytesRef("hello")));
+    try {
+      w.addDocument(doc);
+      fail("didn't hit expected exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
     }
-
+    doc = new Document();
+    doc.add(f);
+    w.addDocument(doc);
+    w.forceMerge(1);
+    DirectoryReader r = w.getReader();
+    assertEquals(17, getOnlySegmentReader(r).getNumericDocValues("field").get(0));
     r.close();
-    dir.close();
+    w.close();
+    d.close();
   }
 
   // LUCENE-3870
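
testDifferentTypedDocValuesField2 above pairs NumericDocValuesField with SortedDocValuesField to show that a field may carry only one doc-values type. A minimal sketch of indexing a sorted value on its own, not from the commit (names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.SortedDocValuesField;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.util.BytesRef;

    class SortedDVIndexSketch {
      // One sorted (deduplicated, ord-mapped) byte value per document.
      static void addCountry(IndexWriter writer, String country) throws IOException {
        Document doc = new Document();
        doc.add(new SortedDocValuesField("country", new BytesRef(country)));
        writer.addDocument(doc);
      }
    }
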
@@ -1017,20 +173,21 @@ public class TestDocValuesIndexing exten
     BytesRef b = new BytesRef();
     b.bytes = bytes;
     b.length = bytes.length;
-    doc.add(new DerefBytesDocValuesField("field", b));
+    doc.add(new SortedDocValuesField("field", b));
     w.addDocument(doc);
     bytes[0] = 1;
     w.addDocument(doc);
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
-    Source s = getOnlySegmentReader(r).docValues("field").getSource();
+    BinaryDocValues s = FieldCache.DEFAULT.getTerms(getOnlySegmentReader(r), "field");
 
-    BytesRef bytes1 = s.getBytes(0, new BytesRef());
+    BytesRef bytes1 = new BytesRef();
+    s.get(0, bytes1);
     assertEquals(bytes.length, bytes1.length);
     bytes[0] = 0;
     assertEquals(b, bytes1);
     
-    bytes1 = s.getBytes(1, new BytesRef());
+    s.get(1, bytes1);
     assertEquals(bytes.length, bytes1.length);
     bytes[0] = 1;
     assertEquals(b, bytes1);
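
The LUCENE-3870 hunk above now reads bytes back through a BinaryDocValues view obtained from FieldCache.getTerms, filling a caller-supplied BytesRef. A minimal sketch of that read path, not from the commit (reader and field names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.index.BinaryDocValues;
    import org.apache.lucene.search.FieldCache;
    import org.apache.lucene.util.BytesRef;

    class BinaryDVReadSketch {
      // Dumps the per-document byte values of one field.
      static void dump(AtomicReader reader, String field) throws IOException {
        BinaryDocValues values = FieldCache.DEFAULT.getTerms(reader, field);
        BytesRef scratch = new BytesRef();  // reused; get() fills it in place
        for (int docID = 0; docID < reader.maxDoc(); docID++) {
          values.get(docID, scratch);
          System.out.println(docID + " -> " + scratch);  // BytesRef.toString shows the bytes
        }
      }
    }
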
@@ -1038,24 +195,7 @@ public class TestDocValuesIndexing exten
     w.close();
     d.close();
   }
-  
-  public void testFixedLengthNotReallyFixed() throws IOException {
-    Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
-    Document doc = new Document();
-    doc.add(new DerefBytesDocValuesField("foo", new BytesRef("bar"), true));
-    w.addDocument(doc);
-    doc = new Document();
-    doc.add(new DerefBytesDocValuesField("foo", new BytesRef("bazz"), true));
-    try {
-      w.addDocument(doc);
-    } catch (IllegalArgumentException expected) {
-      // expected
-    }
-    w.close();
-    d.close();
-  }
-  
+
   public void testDocValuesUnstored() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
@@ -1063,7 +203,7 @@ public class TestDocValuesIndexing exten
     IndexWriter writer = new IndexWriter(dir, iwconfig);
     for (int i = 0; i < 50; i++) {
       Document doc = new Document();
-      doc.add(new PackedLongDocValuesField("dv", i));
+      doc.add(new NumericDocValuesField("dv", i));
       doc.add(new TextField("docId", "" + i, Field.Store.YES));
       writer.addDocument(doc);
     }
@@ -1072,10 +212,9 @@ public class TestDocValuesIndexing exten
     FieldInfos fi = slow.getFieldInfos();
     FieldInfo dvInfo = fi.fieldInfo("dv");
     assertTrue(dvInfo.hasDocValues());
-    DocValues dv = slow.docValues("dv");
-    Source source = dv.getDirectSource();
+    NumericDocValues dv = slow.getNumericDocValues("dv");
     for (int i = 0; i < 50; i++) {
-      assertEquals(i, source.getInt(i));
+      assertEquals(i, dv.get(i));
       StoredDocument d = slow.document(i);
       // cannot use d.get("dv") due to another bug!
       assertNull(d.getField("dv"));
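
The hunk above checks FieldInfo.hasDocValues() and then reads through AtomicReader.getNumericDocValues instead of the removed Source API. A minimal per-segment sketch of that pattern, not from the commit (names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.index.FieldInfo;
    import org.apache.lucene.index.NumericDocValues;

    class SegmentNumericDVSketch {
      // Reads every value of a numeric doc-values field in one segment,
      // guarding on the field actually having doc values.
      static void dump(AtomicReader reader, String field) throws IOException {
        FieldInfo info = reader.getFieldInfos().fieldInfo(field);
        if (info == null || !info.hasDocValues()) {
          return;  // field absent or indexed without doc values
        }
        NumericDocValues dv = reader.getNumericDocValues(field);
        if (dv == null) {
          return;  // field carries doc values of a different type
        }
        for (int docID = 0; docID < reader.maxDoc(); docID++) {
          System.out.println(docID + " -> " + dv.get(docID));
        }
      }
    }
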
@@ -1085,24 +224,530 @@ public class TestDocValuesIndexing exten
     writer.close();
     dir.close();
   }
+
+  // Same field in one document as different types:
+  public void testMixedTypesSameDocument() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("foo", 0));
+    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
+    try {
+      w.addDocument(doc);
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    w.close();
+    dir.close();
+  }
+
+  // Two documents with same field as different types:
+  public void testMixedTypesDifferentDocuments() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("foo", 0));
+    w.addDocument(doc);
+
+    doc = new Document();
+    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
+    try {
+      w.addDocument(doc);
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    w.close();
+    dir.close();
+  }
+  
+  public void testAddSortedTwice() throws IOException {
+    Analyzer analyzer = new MockAnalyzer(random());
+
+    Directory directory = newDirectory();
+    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    iwc.setMergePolicy(newLogMergePolicy());
+    IndexWriter iwriter = new IndexWriter(directory, iwc);
+    Document doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
+    doc.add(new SortedDocValuesField("dv", new BytesRef("bar!")));
+    try {
+      iwriter.addDocument(doc);
+      fail("didn't hit expected exception");
+    } catch (IllegalArgumentException expected) {
+      // expected
+    }
+    
+    iwriter.close();
+    directory.close();
+  }
   
-  /**
-  *
-  */
- public static class NotCachingSourceCache extends SourceCache {
-   
-   @Override
-   public Source load(DocValues values) throws IOException {
-     return values.loadSource();
-   }
-   
-   @Override
-   public Source loadDirect(DocValues values) throws IOException {
-     return values.loadDirectSource();
-   }
-   
-   @Override
-   public void invalidate(DocValues values) {}
- }
- 
+  public void testAddBinaryTwice() throws IOException {
+    Analyzer analyzer = new MockAnalyzer(random());
+
+    Directory directory = newDirectory();
+    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    iwc.setMergePolicy(newLogMergePolicy());
+    IndexWriter iwriter = new IndexWriter(directory, iwc);
+    Document doc = new Document();
+    doc.add(new BinaryDocValuesField("dv", new BytesRef("foo!")));
+    doc.add(new BinaryDocValuesField("dv", new BytesRef("bar!")));
+    try {
+      iwriter.addDocument(doc);
+      fail("didn't hit expected exception");
+    } catch (IllegalArgumentException expected) {
+      // expected
+    }
+    
+    iwriter.close();
+    directory.close();
+  }
+  
+  public void testAddNumericTwice() throws IOException {
+    Analyzer analyzer = new MockAnalyzer(random());
+
+    Directory directory = newDirectory();
+    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    iwc.setMergePolicy(newLogMergePolicy());
+    IndexWriter iwriter = new IndexWriter(directory, iwc);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 1));
+    doc.add(new NumericDocValuesField("dv", 2));
+    try {
+      iwriter.addDocument(doc);
+      fail("didn't hit expected exception");
+    } catch (IllegalArgumentException expected) {
+      // expected
+    }
+    
+    iwriter.close();
+    directory.close();
+  }
+  
+  public void testTooLargeBytes() throws IOException {
+    Analyzer analyzer = new MockAnalyzer(random());
+
+    Directory directory = newDirectory();
+    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    iwc.setMergePolicy(newLogMergePolicy());
+    IndexWriter iwriter = new IndexWriter(directory, iwc);
+    Document doc = new Document();
+    byte bytes[] = new byte[100000];
+    BytesRef b = new BytesRef(bytes);
+    random().nextBytes(bytes);
+    doc.add(new BinaryDocValuesField("dv", b));
+    try {
+      iwriter.addDocument(doc);
+      fail("did not get expected exception");
+    } catch (IllegalArgumentException expected) {
+      // expected
+    }
+    iwriter.close();
+
+    directory.close();
+  }
+  
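+  // Same length check for SortedDocValuesField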
+  public void testTooLargeSortedBytes() throws IOException {
+    Analyzer analyzer = new MockAnalyzer(random());
+
+    Directory directory = newDirectory();
+    // we don't use RandomIndexWriter because it might add more docvalues than we expect
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    iwc.setMergePolicy(newLogMergePolicy());
+    IndexWriter iwriter = new IndexWriter(directory, iwc);
+    Document doc = new Document();
+    byte[] bytes = new byte[100000];
+    random().nextBytes(bytes);
+    BytesRef b = new BytesRef(bytes);
+    doc.add(new SortedDocValuesField("dv", b));
+    try {
+      iwriter.addDocument(doc);
+      fail("did not get expected exception");
+    } catch (IllegalArgumentException expected) {
+      // expected
+    }
+    iwriter.close();
+    directory.close();
+  }
+
+  // Two documents with the same field as different DV types, in separate segments
+  public void testMixedTypesDifferentSegments() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("foo", 0));
+    w.addDocument(doc);
+    w.commit();
+
+    doc = new Document();
+    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
+    try {
+      w.addDocument(doc);
+      fail("did not hit expected exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    w.close();
+    dir.close();
+  }
+
+  // Add inconsistent document after deleteAll
+  public void testMixedTypesAfterDeleteAll() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("foo", 0));
+    w.addDocument(doc);
+    w.deleteAll();
+
+    doc = new Document();
+    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
+    w.addDocument(doc);
+    w.close();
+    dir.close();
+  }
+
+  // Add inconsistent document after reopening IW w/ create
+  public void testMixedTypesAfterReopenCreate() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("foo", 0));
+    w.addDocument(doc);
+    w.close();
+
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+    w = new IndexWriter(dir, iwc);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
+    w.addDocument(doc);
+    w.close();
+    dir.close();
+  }
+
+  // Two documents with same field as different types, added
+  // from separate threads:
+  public void testMixedTypesDifferentThreads() throws Exception {
+    Directory dir = newDirectory();
+    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+
+    final CountDownLatch startingGun = new CountDownLatch(1);
+    final AtomicBoolean hitExc = new AtomicBoolean();
+    Thread[] threads = new Thread[3];
+    for(int i=0;i<3;i++) {
+      Field field;
+      if (i == 0) {
+        field = new SortedDocValuesField("foo", new BytesRef("hello"));
+      } else if (i == 1) {
+        field = new NumericDocValuesField("foo", 0);
+      } else {
+        field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
+      }
+      final Document doc = new Document();
+      doc.add(field);
+
+      threads[i] = new Thread() {
+          @Override
+          public void run() {
+            try {
+              startingGun.await();
+              w.addDocument(doc);
+            } catch (IllegalArgumentException iae) {
+              // expected
+              hitExc.set(true);
+            } catch (Exception e) {
+              throw new RuntimeException(e);
+            }
+          }
+        };
+      threads[i].start();
+    }
+
+    startingGun.countDown();
+
+    for(Thread t : threads) {
+      t.join();
+    }
+    assertTrue(hitExc.get());
+    w.close();
+    dir.close();
+  }
+
+  // Adding an index with a conflicting DV type via addIndexes
+  public void testMixedTypesViaAddIndexes() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("foo", 0));
+    w.addDocument(doc);
+
+    // Make 2nd index w/ inconsistent field
+    Directory dir2 = newDirectory();
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    doc = new Document();
+    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
+    w2.addDocument(doc);
+    w2.close();
+
+    try {
+      w.addIndexes(new Directory[] {dir2});
+      fail("did not hit expected exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+
+    IndexReader r = DirectoryReader.open(dir2);
+    try {
+      w.addIndexes(new IndexReader[] {r});
+      fail("did not hit expected exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+
+    r.close();
+    dir2.close();
+    w.close();
+    dir.close();
+  }
+
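+  // Changing a field's DV type within one writer session is not allowed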
+  public void testIllegalTypeChange() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    try {
+      writer.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    writer.close();
+    dir.close();
+  }
+
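+  // The check also applies across writer sessions / separate segments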
+  public void testIllegalTypeChangeAcrossSegments() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+
+    writer = new IndexWriter(dir, conf);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    try {
+      writer.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    writer.close();
+    dir.close();
+  }
+
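+  // After close, reopen and deleteAll, the field may switch DV type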
+  public void testTypeChangeAfterCloseAndDeleteAll() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+
+    writer = new IndexWriter(dir, conf);
+    writer.deleteAll();
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    writer.addDocument(doc);
+    writer.close();
+    dir.close();
+  }
+
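+  // deleteAll within the same writer session also resets the field's DV type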
+  public void testTypeChangeAfterDeleteAll() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.deleteAll();
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    writer.addDocument(doc);
+    writer.close();
+    dir.close();
+  }
+
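+  // commit followed by deleteAll likewise allows the type to change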
+  public void testTypeChangeAfterCommitAndDeleteAll() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.commit();
+    writer.deleteAll();
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    writer.addDocument(doc);
+    writer.close();
+    dir.close();
+  }
+
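+  // Reopening with OpenMode.CREATE wipes the index, so the type may change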
+  public void testTypeChangeAfterOpenCreate() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+    conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+    writer = new IndexWriter(dir, conf);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    writer.addDocument(doc);
+    writer.close();
+    dir.close();
+  }
+
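+  // addIndexes(Directory...) must reject an index with a conflicting DV type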
+  public void testTypeChangeViaAddIndexes() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+
+    Directory dir2 = newDirectory();
+    writer = new IndexWriter(dir2, conf);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    writer.addDocument(doc);
+    try {
+      writer.addIndexes(dir);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    writer.close();
+
+    dir.close();
+    dir2.close();
+  }
+
+  public void testTypeChangeViaAddIndexesIR() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+
+    Directory dir2 = newDirectory();
+    writer = new IndexWriter(dir2, conf);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    writer.addDocument(doc);
+    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
+    try {
+      writer.addIndexes(readers);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    readers[0].close();
+    writer.close();
+
+    dir.close();
+    dir2.close();
+  }
+
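+  // After importing via addIndexes, adding a doc with a conflicting type must fail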
+  public void testTypeChangeViaAddIndexes2() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+
+    Directory dir2 = newDirectory();
+    writer = new IndexWriter(dir2, conf);
+    writer.addIndexes(dir);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    try {
+      writer.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    writer.close();
+    dir2.close();
+    dir.close();
+  }
+
+  public void testTypeChangeViaAddIndexesIR2() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    writer.close();
+
+    Directory dir2 = newDirectory();
+    writer = new IndexWriter(dir2, conf);
+    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
+    writer.addIndexes(readers);
+    readers[0].close();
+    doc = new Document();
+    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
+    try {
+      writer.addDocument(doc);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    writer.close();
+    dir2.close();
+    dir.close();
+  }
+
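+  // getDocsWithField sees the DV field in both docs, with and without an indexed "dv" field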
+  public void testDocsWithField() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+
+    doc = new Document();
+    doc.add(new TextField("dv", "some text", Field.Store.NO));
+    doc.add(new NumericDocValuesField("dv", 0L));
+    writer.addDocument(doc);
+    
+    DirectoryReader r = writer.getReader();
+    writer.close();
+
+    AtomicReader subR = r.leaves().get(0).reader();
+    assertEquals(2, subR.numDocs());
+
+    Bits bits = FieldCache.DEFAULT.getDocsWithField(subR, "dv");
+    assertTrue(bits.get(0));
+    assertTrue(bits.get(1));
+    r.close();
+    dir.close();
+  }
+
 }

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java Thu Feb  7 20:48:21 2013
@@ -97,7 +97,7 @@ public class TestDocumentWriter extends 
     // omitNorms is true
     for (FieldInfo fi : reader.getFieldInfos()) {
       if (fi.isIndexed()) {
-        assertTrue(fi.omitsNorms() == (reader.normValues(fi.name) == null));
+        assertTrue(fi.omitsNorms() == (reader.getNormValues(fi.name) == null));
       }
     }
     reader.close();

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java Thu Feb  7 20:48:21 2013
@@ -65,12 +65,9 @@ public class TestDuelingCodecs extends L
     // as this gives the best overall coverage. when we have more
     // codecs we should probably pick 2 from Codec.availableCodecs()
     
-    // TODO: it would also be nice to support preflex, but it doesn't
-    // support a lot of the current feature set (docvalues, statistics)
-    // so this would make assertEquals complicated.
-
     leftCodec = Codec.forName("SimpleText");
     rightCodec = new RandomCodec(random());
+
     leftDir = newDirectory();
     rightDir = newDirectory();
 
@@ -110,10 +107,19 @@ public class TestDuelingCodecs extends L
   
   @Override
   public void tearDown() throws Exception {
-    leftReader.close();
-    rightReader.close();   
-    leftDir.close();
-    rightDir.close();
+    if (leftReader != null) {
+      leftReader.close();
+    }
+    if (rightReader != null) {
+      rightReader.close();   
+    }
+
+    if (leftDir != null) {
+      leftDir.close();
+    }
+    if (rightDir != null) {
+      rightDir.close();
+    }
     
     super.tearDown();
   }
@@ -536,13 +542,13 @@ public class TestDuelingCodecs extends L
     }
     
     for (String field : leftFields) {
-      DocValues leftNorms = MultiDocValues.getNormDocValues(leftReader, field);
-      DocValues rightNorms = MultiDocValues.getNormDocValues(rightReader, field);
+      NumericDocValues leftNorms = MultiDocValues.getNormValues(leftReader, field);
+      NumericDocValues rightNorms = MultiDocValues.getNormValues(rightReader, field);
       if (leftNorms != null && rightNorms != null) {
-        assertDocValues(leftNorms, rightNorms);
+        assertDocValues(leftReader.maxDoc(), leftNorms, rightNorms);
       } else {
-        assertNull(leftNorms);
-        assertNull(rightNorms);
+        assertNull(info, leftNorms);
+        assertNull(info, rightNorms);
       }
     }
   }
@@ -618,68 +624,74 @@ public class TestDuelingCodecs extends L
    * checks that docvalues across all fields are equivalent
    */
   public void assertDocValues(IndexReader leftReader, IndexReader rightReader) throws Exception {
-    Set<String> leftValues = getDVFields(leftReader);
-    Set<String> rightValues = getDVFields(rightReader);
-    assertEquals(info, leftValues, rightValues);
-
-    for (String field : leftValues) {
-      DocValues leftDocValues = MultiDocValues.getDocValues(leftReader, field);
-      DocValues rightDocValues = MultiDocValues.getDocValues(rightReader, field);
-      if (leftDocValues != null && rightDocValues != null) {
-        assertDocValues(leftDocValues, rightDocValues);
-      } else {
-        assertNull(leftDocValues);
-        assertNull(rightDocValues);
+    Set<String> leftFields = getDVFields(leftReader);
+    Set<String> rightFields = getDVFields(rightReader);
+    assertEquals(info, leftFields, rightFields);
+
+    for (String field : leftFields) {
+      // TODO: clean this up... very messy
+      {
+        NumericDocValues leftValues = MultiDocValues.getNumericValues(leftReader, field);
+        NumericDocValues rightValues = MultiDocValues.getNumericValues(rightReader, field);
+        if (leftValues != null && rightValues != null) {
+          assertDocValues(leftReader.maxDoc(), leftValues, rightValues);
+        } else {
+          assertNull(info, leftValues);
+          assertNull(info, rightValues);
+        }
+      }
+
+      {
+        BinaryDocValues leftValues = MultiDocValues.getBinaryValues(leftReader, field);
+        BinaryDocValues rightValues = MultiDocValues.getBinaryValues(rightReader, field);
+        if (leftValues != null && rightValues != null) {
+          BytesRef scratchLeft = new BytesRef();
+          BytesRef scratchRight = new BytesRef();
+          for(int docID=0;docID<leftReader.maxDoc();docID++) {
+            leftValues.get(docID, scratchLeft);
+            rightValues.get(docID, scratchRight);
+            assertEquals(info, scratchLeft, scratchRight);
+          }
+        } else {
+          assertNull(info, leftValues);
+          assertNull(info, rightValues);
+        }
+      }
+      
+      {
+        SortedDocValues leftValues = MultiDocValues.getSortedValues(leftReader, field);
+        SortedDocValues rightValues = MultiDocValues.getSortedValues(rightReader, field);
+        if (leftValues != null && rightValues != null) {
+          // numOrds
+          assertEquals(info, leftValues.getValueCount(), rightValues.getValueCount());
+          // ords
+          BytesRef scratchLeft = new BytesRef();
+          BytesRef scratchRight = new BytesRef();
+          for (int i = 0; i < leftValues.getValueCount(); i++) {
+            leftValues.lookupOrd(i, scratchLeft);
+            rightValues.lookupOrd(i, scratchRight);
+            assertEquals(info, scratchLeft, scratchRight);
+          }
+          // bytes
+          for(int docID=0;docID<leftReader.maxDoc();docID++) {
+            leftValues.get(docID, scratchLeft);
+            rightValues.get(docID, scratchRight);
+            assertEquals(info, scratchLeft, scratchRight);
+          }
+        } else {
+          assertNull(info, leftValues);
+          assertNull(info, rightValues);
+        }
       }
     }
   }
   
-  public void assertDocValues(DocValues leftDocValues, DocValues rightDocValues) throws Exception {
+  public void assertDocValues(int num, NumericDocValues leftDocValues, NumericDocValues rightDocValues) throws Exception {
     assertNotNull(info, leftDocValues);
     assertNotNull(info, rightDocValues);
-    assertEquals(info, leftDocValues.getType(), rightDocValues.getType());
-    assertEquals(info, leftDocValues.getValueSize(), rightDocValues.getValueSize());
-    assertDocValuesSource(leftDocValues.getDirectSource(), rightDocValues.getDirectSource());
-    assertDocValuesSource(leftDocValues.getSource(), rightDocValues.getSource());
-  }
-  
-  /**
-   * checks source API
-   */
-  public void assertDocValuesSource(DocValues.Source left, DocValues.Source right) throws Exception {
-    DocValues.Type leftType = left.getType();
-    assertEquals(info, leftType, right.getType());
-    switch(leftType) {
-      case VAR_INTS:
-      case FIXED_INTS_8:
-      case FIXED_INTS_16:
-      case FIXED_INTS_32:
-      case FIXED_INTS_64:
-        for (int i = 0; i < leftReader.maxDoc(); i++) {
-          assertEquals(info, left.getInt(i), right.getInt(i));
-        }
-        break;
-      case FLOAT_32:
-      case FLOAT_64:
-        for (int i = 0; i < leftReader.maxDoc(); i++) {
-          assertEquals(info, left.getFloat(i), right.getFloat(i), 0F);
-        }
-        break;
-      case BYTES_FIXED_STRAIGHT:
-      case BYTES_FIXED_DEREF:
-      case BYTES_VAR_STRAIGHT:
-      case BYTES_VAR_DEREF:
-        BytesRef b1 = new BytesRef();
-        BytesRef b2 = new BytesRef();
-        for (int i = 0; i < leftReader.maxDoc(); i++) {
-          left.getBytes(i, b1);
-          right.getBytes(i, b2);
-          assertEquals(info, b1, b2);
-        }
-        break;
-      // TODO: can we test these?
-      case BYTES_VAR_SORTED:
-      case BYTES_FIXED_SORTED:
+    for(int docID=0;docID<num;docID++) {
+      assertEquals(leftDocValues.get(docID),
+                   rightDocValues.get(docID));
     }
   }
   

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java Thu Feb  7 20:48:21 2013
@@ -48,11 +48,12 @@ import org.junit.BeforeClass;
 
 public class TestFieldsReader extends LuceneTestCase {
   private static Directory dir;
-  private static Document testDoc = new Document();
+  private static Document testDoc;
   private static FieldInfos.Builder fieldInfos = null;
 
   @BeforeClass
   public static void beforeClass() throws Exception {
+    testDoc = new Document();
     fieldInfos = new FieldInfos.Builder();
     DocHelper.setupDoc(testDoc);
     for (IndexableField field : testDoc.getFields()) {
@@ -291,12 +292,12 @@ public class TestFieldsReader extends Lu
 
     for(AtomicReaderContext ctx : r.leaves()) {
       final AtomicReader sub = ctx.reader();
-      final int[] ids = FieldCache.DEFAULT.getInts(sub, "id", false);
+      final FieldCache.Ints ids = FieldCache.DEFAULT.getInts(sub, "id", false);
       for(int docID=0;docID<sub.numDocs();docID++) {
         final StoredDocument doc = sub.document(docID);
         final Field f = (Field) doc.getField("nf");
         assertTrue("got f=" + f, f instanceof StoredField);
-        assertEquals(answers[ids[docID]], f.numericValue());
+        assertEquals(answers[ids.get(docID)], f.numericValue());
       }
     }
     r.close();

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java Thu Feb  7 20:48:21 2013
@@ -34,9 +34,12 @@ import org.apache.lucene.analysis.tokena
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
+import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
@@ -1017,10 +1020,16 @@ public class TestIndexWriter extends Luc
       Document doc = new Document();
       doc.add(newStringField(random, "id", "500", Field.Store.NO));
       doc.add(newField(random, "field", "some prepackaged text contents", storedTextType));
+      doc.add(new BinaryDocValuesField("binarydv", new BytesRef("500")));
+      doc.add(new NumericDocValuesField("numericdv", 500));
+      doc.add(new SortedDocValuesField("sorteddv", new BytesRef("500")));
       w.addDocument(doc);
       doc = new Document();
       doc.add(newStringField(random, "id", "501", Field.Store.NO));
       doc.add(newField(random, "field", "some more contents", storedTextType));
+      doc.add(new BinaryDocValuesField("binarydv", new BytesRef("501")));
+      doc.add(new NumericDocValuesField("numericdv", 501));
+      doc.add(new SortedDocValuesField("sorteddv", new BytesRef("501")));
       w.addDocument(doc);
       w.deleteDocuments(new Term("id", "500"));
       w.close();
@@ -1045,10 +1054,19 @@ public class TestIndexWriter extends Luc
 
             Document doc = new Document();
             Field idField = newStringField(random, "id", "", Field.Store.NO);
+            Field binaryDVField = new BinaryDocValuesField("binarydv", new BytesRef());
+            Field numericDVField = new NumericDocValuesField("numericdv", 0);
+            Field sortedDVField = new SortedDocValuesField("sorteddv", new BytesRef());
             doc.add(idField);
             doc.add(newField(random, "field", "some text contents", storedTextType));
+            doc.add(binaryDVField);
+            doc.add(numericDVField);
+            doc.add(sortedDVField);
             for(int i=0;i<100;i++) {
               idField.setStringValue(Integer.toString(i));
+              binaryDVField.setBytesValue(new BytesRef(idField.stringValue()));
+              numericDVField.setLongValue(i);
+              sortedDVField.setBytesValue(new BytesRef(idField.stringValue()));
               int action = random.nextInt(100);
               if (action == 17) {
                 w.addIndexes(adder);
@@ -1694,10 +1712,11 @@ public class TestIndexWriter extends Luc
     w.close();
     assertEquals(1, reader.docFreq(new Term("content", bigTerm)));
 
-    FieldCache.DocTermsIndex dti = FieldCache.DEFAULT.getTermsIndex(SlowCompositeReaderWrapper.wrap(reader), "content", random().nextFloat() * PackedInts.FAST);
-    assertEquals(5, dti.numOrd());                // +1 for null ord
-    assertEquals(4, dti.size());
-    assertEquals(bigTermBytesRef, dti.lookup(3, new BytesRef()));
+    SortedDocValues dti = FieldCache.DEFAULT.getTermsIndex(SlowCompositeReaderWrapper.wrap(reader), "content", random().nextFloat() * PackedInts.FAST);
+    assertEquals(4, dti.getValueCount());
+    BytesRef br = new BytesRef();
+    dti.lookupOrd(2, br);
+    assertEquals(bigTermBytesRef, br);
     reader.close();
     dir.close();
   }

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Thu Feb  7 20:48:21 2013
@@ -32,6 +32,7 @@ import org.apache.lucene.analysis.*;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
@@ -389,6 +390,7 @@ public class TestIndexWriterDelete exten
     doc.add(newTextField("content", "aaa", Field.Store.NO));
     doc.add(newStringField("id", String.valueOf(id), Field.Store.YES));
     doc.add(newStringField("value", String.valueOf(value), Field.Store.NO));
+    doc.add(new NumericDocValuesField("dv", value));
     modifier.updateDocument(new Term("id", String.valueOf(id)), doc);
   }
 
@@ -399,6 +401,7 @@ public class TestIndexWriterDelete exten
     doc.add(newTextField("content", "aaa", Field.Store.NO));
     doc.add(newStringField("id", String.valueOf(id), Field.Store.YES));
     doc.add(newStringField("value", String.valueOf(value), Field.Store.NO));
+    doc.add(new NumericDocValuesField("dv", value));
     modifier.addDocument(doc);
   }
 
@@ -437,6 +440,7 @@ public class TestIndexWriterDelete exten
       Document d = new Document();
       d.add(newStringField("id", Integer.toString(i), Field.Store.YES));
       d.add(newTextField("content", "aaa " + i, Field.Store.NO));
+      d.add(new NumericDocValuesField("dv", i));
       writer.addDocument(d);
     }
     writer.close();
@@ -515,6 +519,7 @@ public class TestIndexWriterDelete exten
                 Document d = new Document();
                 d.add(newStringField("id", Integer.toString(i), Field.Store.YES));
                 d.add(newTextField("content", "bbb " + i, Field.Store.NO));
+                d.add(new NumericDocValuesField("dv", i));
                 modifier.updateDocument(new Term("id", Integer.toString(docId)), d);
               } else { // deletes
                 modifier.deleteDocuments(new Term("id", Integer.toString(docId)));

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java?rev=1443717&r1=1443716&r2=1443717&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java Thu Feb  7 20:48:21 2013
@@ -27,9 +27,12 @@ import java.util.Random;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.analysis.*;
+import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
@@ -137,6 +140,9 @@ public class TestIndexWriterExceptions e
 
       doc.add(newTextField(r, "content4", "aaa bbb ccc ddd", Field.Store.NO));
       doc.add(newStringField(r, "content5", "aaa bbb ccc ddd", Field.Store.NO));
+      doc.add(new NumericDocValuesField("numericdv", 5));
+      doc.add(new BinaryDocValuesField("binarydv", new BytesRef("hello")));
+      doc.add(new SortedDocValuesField("sorteddv", new BytesRef("world")));
 
       doc.add(newField(r, "content7", "aaa bbb ccc ddd", DocCopyIterator.custom4));