Posted to commits@lucene.apache.org by rm...@apache.org on 2013/02/08 04:26:21 UTC

svn commit: r1443834 [8/16] - in /lucene/dev/branches/branch_4x: ./ dev-tools/ lucene/ lucene/analysis/ lucene/analysis/icu/src/java/org/apache/lucene/collation/ lucene/analysis/icu/src/test/org/apache/lucene/collation/ lucene/backwards/ lucene/benchma...

Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility3x.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility3x.java?rev=1443834&r1=1443833&r2=1443834&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility3x.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility3x.java Fri Feb  8 03:26:14 2013
@@ -30,24 +30,18 @@ import java.util.Map;
 import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.ByteDocValuesField;
-import org.apache.lucene.document.DerefBytesDocValuesField;
+import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleDocValuesField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.FloatDocValuesField;
-import org.apache.lucene.document.IntDocValuesField;
 import org.apache.lucene.document.IntField;
-import org.apache.lucene.document.LongDocValuesField;
 import org.apache.lucene.document.LongField;
-import org.apache.lucene.document.PackedLongDocValuesField;
-import org.apache.lucene.document.ShortDocValuesField;
-import org.apache.lucene.document.SortedBytesDocValuesField;
-import org.apache.lucene.document.StraightBytesDocValuesField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.DocValues.Source;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -408,23 +402,23 @@ public class TestBackwardsCompatibility3
     
     if (is40Index) {
       // check docvalues fields
-      Source dvByte = MultiDocValues.getDocValues(reader, "dvByte").getSource();
-      Source dvBytesDerefFixed = MultiDocValues.getDocValues(reader, "dvBytesDerefFixed").getSource();
-      Source dvBytesDerefVar = MultiDocValues.getDocValues(reader, "dvBytesDerefVar").getSource();
-      Source dvBytesSortedFixed = MultiDocValues.getDocValues(reader, "dvBytesSortedFixed").getSource();
-      Source dvBytesSortedVar = MultiDocValues.getDocValues(reader, "dvBytesSortedVar").getSource();
-      Source dvBytesStraightFixed = MultiDocValues.getDocValues(reader, "dvBytesStraightFixed").getSource();
-      Source dvBytesStraightVar = MultiDocValues.getDocValues(reader, "dvBytesStraightVar").getSource();
-      Source dvDouble = MultiDocValues.getDocValues(reader, "dvDouble").getSource();
-      Source dvFloat = MultiDocValues.getDocValues(reader, "dvFloat").getSource();
-      Source dvInt = MultiDocValues.getDocValues(reader, "dvInt").getSource();
-      Source dvLong = MultiDocValues.getDocValues(reader, "dvLong").getSource();
-      Source dvPacked = MultiDocValues.getDocValues(reader, "dvPacked").getSource();
-      Source dvShort = MultiDocValues.getDocValues(reader, "dvShort").getSource();
+      NumericDocValues dvByte = MultiDocValues.getNumericValues(reader, "dvByte");
+      BinaryDocValues dvBytesDerefFixed = MultiDocValues.getBinaryValues(reader, "dvBytesDerefFixed");
+      BinaryDocValues dvBytesDerefVar = MultiDocValues.getBinaryValues(reader, "dvBytesDerefVar");
+      SortedDocValues dvBytesSortedFixed = MultiDocValues.getSortedValues(reader, "dvBytesSortedFixed");
+      SortedDocValues dvBytesSortedVar = MultiDocValues.getSortedValues(reader, "dvBytesSortedVar");
+      BinaryDocValues dvBytesStraightFixed = MultiDocValues.getBinaryValues(reader, "dvBytesStraightFixed");
+      BinaryDocValues dvBytesStraightVar = MultiDocValues.getBinaryValues(reader, "dvBytesStraightVar");
+      NumericDocValues dvDouble = MultiDocValues.getNumericValues(reader, "dvDouble");
+      NumericDocValues dvFloat = MultiDocValues.getNumericValues(reader, "dvFloat");
+      NumericDocValues dvInt = MultiDocValues.getNumericValues(reader, "dvInt");
+      NumericDocValues dvLong = MultiDocValues.getNumericValues(reader, "dvLong");
+      NumericDocValues dvPacked = MultiDocValues.getNumericValues(reader, "dvPacked");
+      NumericDocValues dvShort = MultiDocValues.getNumericValues(reader, "dvShort");
       
       for (int i=0;i<35;i++) {
         int id = Integer.parseInt(reader.document(i).get("id"));
-        assertEquals((byte)id, dvByte.getInt(i));
+        assertEquals(id, dvByte.get(i));
         
         byte bytes[] = new byte[] {
             (byte)(id >>> 24), (byte)(id >>> 16),(byte)(id >>> 8),(byte)id
@@ -432,19 +426,25 @@ public class TestBackwardsCompatibility3
         BytesRef expectedRef = new BytesRef(bytes);
         BytesRef scratch = new BytesRef();
         
-        assertEquals(expectedRef, dvBytesDerefFixed.getBytes(i, scratch));
-        assertEquals(expectedRef, dvBytesDerefVar.getBytes(i, scratch));
-        assertEquals(expectedRef, dvBytesSortedFixed.getBytes(i, scratch));
-        assertEquals(expectedRef, dvBytesSortedVar.getBytes(i, scratch));
-        assertEquals(expectedRef, dvBytesStraightFixed.getBytes(i, scratch));
-        assertEquals(expectedRef, dvBytesStraightVar.getBytes(i, scratch));
+        dvBytesDerefFixed.get(i, scratch);
+        assertEquals(expectedRef, scratch);
+        dvBytesDerefVar.get(i, scratch);
+        assertEquals(expectedRef, scratch);
+        dvBytesSortedFixed.get(i, scratch);
+        assertEquals(expectedRef, scratch);
+        dvBytesSortedVar.get(i, scratch);
+        assertEquals(expectedRef, scratch);
+        dvBytesStraightFixed.get(i, scratch);
+        assertEquals(expectedRef, scratch);
+        dvBytesStraightVar.get(i, scratch);
+        assertEquals(expectedRef, scratch);
         
-        assertEquals((double)id, dvDouble.getFloat(i), 0D);
-        assertEquals((float)id, dvFloat.getFloat(i), 0F);
-        assertEquals(id, dvInt.getInt(i));
-        assertEquals(id, dvLong.getInt(i));
-        assertEquals(id, dvPacked.getInt(i));
-        assertEquals(id, dvShort.getInt(i));
+        assertEquals((double)id, Double.longBitsToDouble(dvDouble.get(i)), 0D);
+        assertEquals((float)id, Float.intBitsToFloat((int)dvFloat.get(i)), 0F);
+        assertEquals(id, dvInt.get(i));
+        assertEquals(id, dvLong.get(i));
+        assertEquals(id, dvPacked.get(i));
+        assertEquals(id, dvShort.get(i));
       }
     }
     
@@ -692,23 +692,23 @@ public class TestBackwardsCompatibility3
     doc.add(new IntField("trieInt", id, Field.Store.NO));
     doc.add(new LongField("trieLong", (long) id, Field.Store.NO));
     // add docvalues fields
-    doc.add(new ByteDocValuesField("dvByte", (byte) id));
+    doc.add(new NumericDocValuesField("dvByte", (byte) id));
     byte bytes[] = new byte[] {
       (byte)(id >>> 24), (byte)(id >>> 16),(byte)(id >>> 8),(byte)id
     };
     BytesRef ref = new BytesRef(bytes);
-    doc.add(new DerefBytesDocValuesField("dvBytesDerefFixed", ref, true));
-    doc.add(new DerefBytesDocValuesField("dvBytesDerefVar", ref, false));
-    doc.add(new SortedBytesDocValuesField("dvBytesSortedFixed", ref, true));
-    doc.add(new SortedBytesDocValuesField("dvBytesSortedVar", ref, false));
-    doc.add(new StraightBytesDocValuesField("dvBytesStraightFixed", ref, true));
-    doc.add(new StraightBytesDocValuesField("dvBytesStraightVar", ref, false));
+    doc.add(new BinaryDocValuesField("dvBytesDerefFixed", ref));
+    doc.add(new BinaryDocValuesField("dvBytesDerefVar", ref));
+    doc.add(new SortedDocValuesField("dvBytesSortedFixed", ref));
+    doc.add(new SortedDocValuesField("dvBytesSortedVar", ref));
+    doc.add(new BinaryDocValuesField("dvBytesStraightFixed", ref));
+    doc.add(new BinaryDocValuesField("dvBytesStraightVar", ref));
     doc.add(new DoubleDocValuesField("dvDouble", (double)id));
     doc.add(new FloatDocValuesField("dvFloat", (float)id));
-    doc.add(new IntDocValuesField("dvInt", id));
-    doc.add(new LongDocValuesField("dvLong", id));
-    doc.add(new PackedLongDocValuesField("dvPacked", id));
-    doc.add(new ShortDocValuesField("dvShort", (short)id));
+    doc.add(new NumericDocValuesField("dvInt", id));
+    doc.add(new NumericDocValuesField("dvLong", id));
+    doc.add(new NumericDocValuesField("dvPacked", id));
+    doc.add(new NumericDocValuesField("dvShort", (short)id));
     // a field with both offsets and term vectors for a cross-check
     FieldType customType3 = new FieldType(TextField.TYPE_STORED);
     customType3.setStoreTermVectors(true);
@@ -855,13 +855,16 @@ public class TestBackwardsCompatibility3
       assertEquals("wrong number of hits", 34, hits.length);
       
       // check decoding into field cache
-      int[] fci = FieldCache.DEFAULT.getInts(SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()), "trieInt", false);
-      for (int val : fci) {
+      FieldCache.Ints fci = FieldCache.DEFAULT.getInts(SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()), "trieInt", false);
+      int maxDoc = searcher.getIndexReader().maxDoc();
+      for(int doc=0;doc<maxDoc;doc++) {
+        int val = fci.get(doc);
         assertTrue("value in id bounds", val >= 0 && val < 35);
       }
       
-      long[] fcl = FieldCache.DEFAULT.getLongs(SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()), "trieLong", false);
-      for (long val : fcl) {
+      FieldCache.Longs fcl = FieldCache.DEFAULT.getLongs(SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()), "trieLong", false);
+      for(int doc=0;doc<maxDoc;doc++) {
+        long val = fcl.get(doc);
         assertTrue("value in id bounds", val >= 0L && val < 35L);
       }
       

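For reference, a minimal sketch (not part of the committed diff) of the 4.x doc values read pattern the hunks above migrate to. It assumes "reader" is an open DirectoryReader over an index written by this test and that the enclosing method declares throws IOException:

    // numeric doc values come back as one long per document
    NumericDocValues dvLong = MultiDocValues.getNumericValues(reader, "dvLong");
    // binary doc values are filled into a caller-supplied, reusable BytesRef
    BinaryDocValues dvBytes = MultiDocValues.getBinaryValues(reader, "dvBytesStraightVar");
    BytesRef scratch = new BytesRef();
    for (int docID = 0; docID < reader.maxDoc(); docID++) {
      long value = dvLong.get(docID);   // replaces Source.getInt(docID)
      dvBytes.get(docID, scratch);      // replaces Source.getBytes(docID, scratch)
      // float/double fields are stored as raw bits, decoded with
      // Float.intBitsToFloat((int) ...) / Double.longBitsToDouble(...)
    }
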
Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java?rev=1443834&r1=1443833&r2=1443834&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java Fri Feb  8 03:26:14 2013
@@ -37,6 +37,7 @@ import org.apache.lucene.document.Field.
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
@@ -95,8 +96,41 @@ public class TestCodecs extends LuceneTe
       this.omitTF = omitTF;
       this.storePayloads = storePayloads;
       // TODO: change this test to use all three
-      fieldInfos.addOrUpdate(name, true, false, false, storePayloads, omitTF ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, null, null);
-      fieldInfo = fieldInfos.fieldInfo(name);
+      fieldInfo = fieldInfos.addOrUpdate(name, new IndexableFieldType() {
+
+        @Override
+        public boolean indexed() { return true; }
+
+        @Override
+        public boolean stored() { return false; }
+
+        @Override
+        public boolean tokenized() { return false; }
+
+        @Override
+        public boolean storeTermVectors() { return false; }
+
+        @Override
+        public boolean storeTermVectorOffsets() { return false; }
+
+        @Override
+        public boolean storeTermVectorPositions() { return false; }
+
+        @Override
+        public boolean storeTermVectorPayloads() { return false; }
+
+        @Override
+        public boolean omitNorms() { return false; }
+
+        @Override
+        public IndexOptions indexOptions() { return omitTF ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
+
+        @Override
+        public DocValuesType docValueType() { return null; }
+      });
+      if (storePayloads) {
+        fieldInfo.setStorePayloads();
+      }
       this.terms = terms;
       for(int i=0;i<terms.length;i++)
         terms[i].field = this;

Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java?rev=1443834&r1=1443833&r2=1443834&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java Fri Feb  8 03:26:14 2013
@@ -17,25 +17,17 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 import java.io.IOException;
-import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.DocValues.Source;
-import org.apache.lucene.index.DocValues.Type;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.similarities.Similarity.ExactSimScorer;
-import org.apache.lucene.search.similarities.Similarity.SimWeight;
-import org.apache.lucene.search.similarities.Similarity.SloppySimScorer;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
@@ -75,106 +67,18 @@ public class TestCustomNorms extends Luc
     writer.commit();
     writer.close();
     AtomicReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
-    DocValues normValues = open.normValues(floatTestField);
-    assertNotNull(normValues);
-    Source source = normValues.getSource();
-    assertTrue(source.hasArray());
-    assertEquals(Type.FLOAT_32, normValues.getType());
-    float[] norms = (float[]) source.getArray();
+    NumericDocValues norms = open.getNormValues(floatTestField);
+    assertNotNull(norms);
     for (int i = 0; i < open.maxDoc(); i++) {
       Document document = open.document(i);
       float expected = Float.parseFloat(document.get(floatTestField));
-      assertEquals(expected, norms[i], 0.0f);
+      assertEquals(expected, Float.intBitsToFloat((int)norms.get(i)), 0.0f);
     }
     open.close();
     dir.close();
     docs.close();
   }
 
-  public void testExceptionOnRandomType() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random()));
-    Similarity provider = new MySimProvider();
-    config.setSimilarity(provider);
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
-    final LineFileDocs docs = new LineFileDocs(random());
-    int num = atLeast(100);
-    try {
-      for (int i = 0; i < num; i++) {
-        Document doc = docs.nextDoc();
-        float nextFloat = random().nextFloat();
-        Field f = new TextField(exceptionTestField, "" + nextFloat, Field.Store.YES);
-        f.setBoost(nextFloat);
-
-        doc.add(f);
-        writer.addDocument(doc);
-        doc.removeField(exceptionTestField);
-        if (rarely()) {
-          writer.commit();
-        }
-      }
-      fail("expected exception - incompatible types");
-    } catch (IllegalArgumentException e) {
-      // expected
-    }
-    writer.commit();
-    writer.close();
-    dir.close();
-    docs.close();
-
-  }
-  
-  public void testIllegalCustomEncoder() throws Exception {
-    Directory dir = newDirectory();
-    IllegalCustomEncodingSimilarity similarity = new IllegalCustomEncodingSimilarity();
-    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    config.setSimilarity(similarity);
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
-    Document doc = new Document();
-    Field foo = newTextField("foo", "", Field.Store.NO);
-    Field bar = newTextField("bar", "", Field.Store.NO);
-    doc.add(foo);
-    doc.add(bar);
-    
-    int numAdded = 0;
-    for (int i = 0; i < 100; i++) {
-      try {
-        bar.setStringValue("singleton");
-        similarity.useByte = random().nextBoolean();
-        writer.addDocument(doc);
-        numAdded++;
-      } catch (IllegalArgumentException e) {}
-    }
-    
-    
-    IndexReader reader = writer.getReader();
-    writer.close();
-    assertEquals(numAdded, reader.numDocs());
-    IndexReaderContext topReaderContext = reader.getContext();
-    for (final AtomicReaderContext ctx : topReaderContext.leaves()) {
-      AtomicReader atomicReader = ctx.reader();
-      Source source = random().nextBoolean() ? atomicReader.normValues("foo").getSource() : atomicReader.normValues("foo").getDirectSource();
-      Bits liveDocs = atomicReader.getLiveDocs();
-      Type t = source.getType();
-      for (int i = 0; i < atomicReader.maxDoc(); i++) {
-          assertEquals(0, source.getFloat(i), 0.000f);
-      }
-      
-  
-      source = random().nextBoolean() ? atomicReader.normValues("bar").getSource() : atomicReader.normValues("bar").getDirectSource();
-      for (int i = 0; i < atomicReader.maxDoc(); i++) {
-        if (liveDocs == null || liveDocs.get(i)) {
-          assertEquals("type: " + t, 1, source.getFloat(i), 0.000f);
-        } else {
-          assertEquals("type: " + t, 0, source.getFloat(i), 0.000f);
-        }
-      }
-    }
-    reader.close();
-    dir.close();
-  }
-
   public class MySimProvider extends PerFieldSimilarityWrapper {
     Similarity delegate = new DefaultSimilarity();
 
@@ -187,8 +91,6 @@ public class TestCustomNorms extends Luc
     public Similarity get(String field) {
       if (floatTestField.equals(field)) {
         return new FloatEncodingBoostSimilarity();
-      } else if (exceptionTestField.equals(field)) {
-        return new RandomTypeSimilarity(random());
       } else {
         return delegate;
       }
@@ -203,9 +105,8 @@ public class TestCustomNorms extends Luc
   public static class FloatEncodingBoostSimilarity extends Similarity {
 
     @Override
-    public void computeNorm(FieldInvertState state, Norm norm) {
-      float boost = state.getBoost();
-      norm.setFloat(boost);
+    public long computeNorm(FieldInvertState state) {
+      return Float.floatToIntBits(state.getBoost());
     }
     
     @Override
@@ -223,87 +124,4 @@ public class TestCustomNorms extends Luc
       throw new UnsupportedOperationException();
     }
   }
-
-  public static class RandomTypeSimilarity extends Similarity {
-
-    private final Random random;
-    
-    public RandomTypeSimilarity(Random random) {
-      this.random = random;
-    }
-
-    @Override
-    public void computeNorm(FieldInvertState state, Norm norm) {
-      float boost = state.getBoost();
-      int nextInt = random.nextInt(10);
-      switch (nextInt) {
-      case 0:
-        norm.setDouble((double) boost);
-        break;
-      case 1:
-        norm.setFloat(boost);
-        break;
-      case 2:
-        norm.setLong((long) boost);
-        break;
-      case 3:
-        norm.setBytes(new BytesRef(new byte[6]));
-        break;
-      case 4:
-        norm.setInt((int) boost);
-        break;
-      case 5:
-        norm.setShort((short) boost);
-        break;
-      default:
-        norm.setByte((byte) boost);
-      }
-
-    }
-
-    @Override
-    public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ExactSimScorer exactSimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public SloppySimScorer sloppySimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-  }
-  
-  class IllegalCustomEncodingSimilarity extends Similarity {
-    
-    public boolean useByte = false;
-
-    @Override
-    public void computeNorm(FieldInvertState state, Norm norm) {
-      if (useByte) {
-        norm.setByte((byte)state.getLength());
-      } else {
-        norm.setFloat((float)state.getLength());
-      }
-    }
-
-    @Override
-    public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ExactSimScorer exactSimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public SloppySimScorer sloppySimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-  }
-
 }

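For reference, a minimal sketch (not part of the committed diff) of the reworked norms API exercised above: Similarity.computeNorm now returns a single long per document/field instead of filling a Norm object, and norms are read back as NumericDocValues. The names "atomicReader", "float_field", and "docID" below are illustrative only:

    // writing side: encode the float boost as its raw IEEE-754 bits
    @Override
    public long computeNorm(FieldInvertState state) {
      return Float.floatToIntBits(state.getBoost());
    }

    // reading side: getNormValues returns null if the field has no norms
    NumericDocValues norms = atomicReader.getNormValues("float_field");
    float boost = Float.intBitsToFloat((int) norms.get(docID));
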
Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=1443834&r1=1443833&r2=1443834&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java Fri Feb  8 03:26:14 2013
@@ -39,7 +39,6 @@ import org.apache.lucene.index.IndexWrit
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.NoSuchDirectoryException;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -368,57 +367,57 @@ void assertTermDocsCount(String msg,
 
   
   public void testBinaryFields() throws IOException {
-      Directory dir = newDirectory();
-      byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
+    Directory dir = newDirectory();
+    byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
       
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
       
-      for (int i = 0; i < 10; i++) {
-        addDoc(writer, "document number " + (i + 1));
-        addDocumentWithFields(writer);
-        addDocumentWithDifferentFields(writer);
-        addDocumentWithTermVectorFields(writer);
-      }
-      writer.close();
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
-      Document doc = new Document();
-      doc.add(new StoredField("bin1", bin));
-      doc.add(new TextField("junk", "junk text", Field.Store.NO));
-      writer.addDocument(doc);
-      writer.close();
-      DirectoryReader reader = DirectoryReader.open(dir);
-      Document doc2 = reader.document(reader.maxDoc() - 1);
-      IndexableField[] fields = doc2.getFields("bin1");
-      assertNotNull(fields);
-      assertEquals(1, fields.length);
-      IndexableField b1 = fields[0];
-      assertTrue(b1.binaryValue() != null);
-      BytesRef bytesRef = b1.binaryValue();
-      assertEquals(bin.length, bytesRef.length);
-      for (int i = 0; i < bin.length; i++) {
-        assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
-      }
-      reader.close();
-      // force merge
+    for (int i = 0; i < 10; i++) {
+      addDoc(writer, "document number " + (i + 1));
+      addDocumentWithFields(writer);
+      addDocumentWithDifferentFields(writer);
+      addDocumentWithTermVectorFields(writer);
+    }
+    writer.close();
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+    Document doc = new Document();
+    doc.add(new StoredField("bin1", bin));
+    doc.add(new TextField("junk", "junk text", Field.Store.NO));
+    writer.addDocument(doc);
+    writer.close();
+    DirectoryReader reader = DirectoryReader.open(dir);
+    Document doc2 = reader.document(reader.maxDoc() - 1);
+    IndexableField[] fields = doc2.getFields("bin1");
+    assertNotNull(fields);
+    assertEquals(1, fields.length);
+    IndexableField b1 = fields[0];
+    assertTrue(b1.binaryValue() != null);
+    BytesRef bytesRef = b1.binaryValue();
+    assertEquals(bin.length, bytesRef.length);
+    for (int i = 0; i < bin.length; i++) {
+      assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
+    }
+    reader.close();
+    // force merge
 
 
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
-      writer.forceMerge(1);
-      writer.close();
-      reader = DirectoryReader.open(dir);
-      doc2 = reader.document(reader.maxDoc() - 1);
-      fields = doc2.getFields("bin1");
-      assertNotNull(fields);
-      assertEquals(1, fields.length);
-      b1 = fields[0];
-      assertTrue(b1.binaryValue() != null);
-      bytesRef = b1.binaryValue();
-      assertEquals(bin.length, bytesRef.length);
-      for (int i = 0; i < bin.length; i++) {
-        assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
-      }
-      reader.close();
-      dir.close();
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+    writer.forceMerge(1);
+    writer.close();
+    reader = DirectoryReader.open(dir);
+    doc2 = reader.document(reader.maxDoc() - 1);
+    fields = doc2.getFields("bin1");
+    assertNotNull(fields);
+    assertEquals(1, fields.length);
+    b1 = fields[0];
+    assertTrue(b1.binaryValue() != null);
+    bytesRef = b1.binaryValue();
+    assertEquals(bin.length, bytesRef.length);
+    for (int i = 0; i < bin.length; i++) {
+      assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
+    }
+    reader.close();
+    dir.close();
   }
 
   /* ??? public void testOpenEmptyDirectory() throws IOException{
@@ -551,7 +550,7 @@ public void testFilesOpenClose() throws 
     assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
     assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
     assertEquals("Single segment test differs.", index1.leaves().size() == 1, index2.leaves().size() == 1);
-    
+
     // check field names
     FieldInfos fieldInfos1 = MultiFields.getMergedFieldInfos(index1);
     FieldInfos fieldInfos2 = MultiFields.getMergedFieldInfos(index2);
@@ -566,21 +565,16 @@ public void testFilesOpenClose() throws 
     // check norms
     for(FieldInfo fieldInfo : fieldInfos1) {
       String curField = fieldInfo.name;
-      DocValues norms1 = MultiDocValues.getNormDocValues(index1, curField);
-      DocValues norms2 = MultiDocValues.getNormDocValues(index2, curField);
-      if (norms1 != null && norms2 != null)
-      {
+      NumericDocValues norms1 = MultiDocValues.getNormValues(index1, curField);
+      NumericDocValues norms2 = MultiDocValues.getNormValues(index2, curField);
+      if (norms1 != null && norms2 != null) {
         // todo: generalize this (like TestDuelingCodecs assert)
-        byte[] b1 = (byte[]) norms1.getSource().getArray();
-        byte[] b2 = (byte[]) norms2.getSource().getArray();
-        assertEquals(b1.length, b2.length);
-        for (int i = 0; i < b1.length; i++) {
-          assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", b1[i], b2[i]);
+        for (int i = 0; i < index1.maxDoc(); i++) {
+          assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", norms1.get(i), norms2.get(i));
         }
-      }
-      else
-      {
-        assertSame(norms1, norms2);
+      } else {
+        assertNull(norms1);
+        assertNull(norms2);
       }
     }
     
@@ -776,9 +770,8 @@ public void testFilesOpenClose() throws 
     // Open reader1
     DirectoryReader r = DirectoryReader.open(dir);
     AtomicReader r1 = getOnlySegmentReader(r);
-    final int[] ints = FieldCache.DEFAULT.getInts(r1, "number", false);
-    assertEquals(1, ints.length);
-    assertEquals(17, ints[0]);
+    final FieldCache.Ints ints = FieldCache.DEFAULT.getInts(r1, "number", false);
+    assertEquals(17, ints.get(0));
   
     // Add new segment
     writer.addDocument(doc);
@@ -789,7 +782,7 @@ public void testFilesOpenClose() throws 
     assertNotNull(r2);
     r.close();
     AtomicReader sub0 = r2.leaves().get(0).reader();
-    final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number", false);
+    final FieldCache.Ints ints2 = FieldCache.DEFAULT.getInts(sub0, "number", false);
     r2.close();
     assertTrue(ints == ints2);
   

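For reference, a minimal sketch (not part of the committed diff) of the new FieldCache accessor pattern used above: getInts/getLongs now return FieldCache.Ints/FieldCache.Longs holders addressed per document rather than raw arrays, so iteration is bounded by maxDoc(). "atomicReader" stands for any AtomicReader, e.g. one obtained via SlowCompositeReaderWrapper.wrap:

    FieldCache.Ints ids = FieldCache.DEFAULT.getInts(atomicReader, "id", false);
    for (int doc = 0; doc < atomicReader.maxDoc(); doc++) {
      int id = ids.get(doc);  // per-document lookup replaces indexing into an int[]
    }
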
Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java?rev=1443834&r1=1443833&r2=1443834&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java Fri Feb  8 03:26:14 2013
@@ -16,7 +16,6 @@ package org.apache.lucene.index;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -38,7 +37,6 @@ import org.apache.lucene.search.IndexSea
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 

Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java?rev=1443834&r1=1443833&r2=1443834&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java Fri Feb  8 03:26:14 2013
@@ -32,7 +32,6 @@ import org.apache.lucene.document.IntFie
 import org.apache.lucene.index.DocTermOrds.TermOrdsIterator;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
@@ -303,7 +302,7 @@ public class TestDocTermOrds extends Luc
                                             _TestUtil.nextInt(random(), 2, 10));
                                             
 
-    final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
+    final FieldCache.Ints docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
     /*
       for(int docID=0;docID<subR.maxDoc();docID++) {
       System.out.println("  docID=" + docID + " id=" + docIDToID[docID]);
@@ -357,10 +356,10 @@ public class TestDocTermOrds extends Luc
     final int[] buffer = new int[5];
     for(int docID=0;docID<r.maxDoc();docID++) {
       if (VERBOSE) {
-        System.out.println("TEST: docID=" + docID + " of " + r.maxDoc() + " (id=" + docIDToID[docID] + ")");
+        System.out.println("TEST: docID=" + docID + " of " + r.maxDoc() + " (id=" + docIDToID.get(docID) + ")");
       }
       iter = dto.lookup(docID, iter);
-      final int[] answers = idToOrds[docIDToID[docID]];
+      final int[] answers = idToOrds[docIDToID.get(docID)];
       int upto = 0;
       while(true) {
         final int chunk = iter.read(buffer);

Copied: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java (from r1443717, lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java?p2=lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java&p1=lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java&r1=1443717&r2=1443834&rev=1443834&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/TestDocValuesFormat.java Fri Feb  8 03:26:14 2013
@@ -18,10 +18,12 @@ package org.apache.lucene.index;
  */
 
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 
 /** Tests the codec configuration defined by LuceneTestCase randomly
  *  (typically a mix across different fields).
  */
+@SuppressCodecs("Lucene3x")
 public class TestDocValuesFormat extends BaseDocValuesFormatTestCase {
 
   @Override