You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by sh...@apache.org on 2013/09/28 08:19:02 UTC
svn commit: r1527154 [2/3] - in /lucene/dev/trunk:
lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/
lucene/core/src/java/org/apache/lucene/codecs/
lucene/core/src/java/org/apache/lucene/codecs/lucene40/
lucene/core/src/java/org/apache/lucene...
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java Sat Sep 28 06:19:00 2013
@@ -19,9 +19,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.Collection;
-import java.util.HashMap;
import java.util.HashSet;
-import java.util.Map;
import java.util.Set;
import org.apache.lucene.store.Directory;
@@ -46,16 +44,12 @@ public class SegmentInfoPerCommit { // T
// attempt to write:
private long nextWriteDelGen;
- // holds field.number to docValuesGen mapping
- // TODO (DVU_FIELDINFOS_GEN) once we gen FieldInfos, get rid of this; every FieldInfo will record its dvGen
- private final Map<Integer,Long> fieldDocValuesGens = new HashMap<Integer,Long>();
+ // Generation number of the FieldInfos (-1 if there are no updates)
+ private long fieldInfosGen;
- // Generation number of the docValues (-1 if there are no field updates)
- private long docValuesGen;
-
- // Normally 1 + docValuesGen, unless an exception was hit on last attempt to
+ // Normally 1 + fieldInfosGen, unless an exception was hit on last attempt to
// write
- private long nextWriteDocValuesGen;
+ private long nextWriteFieldInfosGen;
// Tracks the files with field updates
private Set<String> updatesFiles = new HashSet<String>();
@@ -71,10 +65,10 @@ public class SegmentInfoPerCommit { // T
* number of deleted documents in this segment
* @param delGen
* deletion generation number (used to name deletion files)
- * @param docValuesGen
- * doc-values generation number (used to name docvalues files)
+ * @param fieldInfosGen
+ * FieldInfos generation number (used to name field-infos files)
**/
- public SegmentInfoPerCommit(SegmentInfo info, int delCount, long delGen, long docValuesGen) {
+ public SegmentInfoPerCommit(SegmentInfo info, int delCount, long delGen, long fieldInfosGen) {
this.info = info;
this.delCount = delCount;
this.delGen = delGen;
@@ -84,11 +78,11 @@ public class SegmentInfoPerCommit { // T
nextWriteDelGen = delGen+1;
}
- this.docValuesGen = docValuesGen;
- if (docValuesGen == -1) {
- nextWriteDocValuesGen = 1;
+ this.fieldInfosGen = fieldInfosGen;
+ if (fieldInfosGen == -1) {
+ nextWriteFieldInfosGen = 1;
} else {
- nextWriteDocValuesGen = docValuesGen + 1;
+ nextWriteFieldInfosGen = fieldInfosGen + 1;
}
}
@@ -116,19 +110,19 @@ public class SegmentInfoPerCommit { // T
nextWriteDelGen++;
}
- /** Called when we succeed in writing docvalues updates */
- void advanceDocValuesGen() {
- docValuesGen = nextWriteDocValuesGen;
- nextWriteDocValuesGen = docValuesGen + 1;
+ /** Called when we succeed in writing a new FieldInfos generation. */
+ void advanceFieldInfosGen() {
+ fieldInfosGen = nextWriteFieldInfosGen;
+ nextWriteFieldInfosGen = fieldInfosGen + 1;
sizeInBytes = -1;
}
/**
- * Called if there was an exception while writing docvalues updates, so that
- * we don't try to write to the same file more than once.
+ * Called if there was an exception while writing a new generation of
+ * FieldInfos, so that we don't try to write to the same file more than once.
*/
- void advanceNextWriteDocValuesGen() {
- nextWriteDocValuesGen++;
+ void advanceNextWriteFieldInfosGen() {
+ nextWriteFieldInfosGen++;
}
/** Returns total size in bytes of all files for this
@@ -183,43 +177,20 @@ public class SegmentInfoPerCommit { // T
/** Returns true if there are any field updates for the segment in this commit. */
public boolean hasFieldUpdates() {
- return docValuesGen != -1;
- }
-
- /** Returns the next available generation number of the docvalues files. */
- public long getNextDocValuesGen() {
- return nextWriteDocValuesGen;
- }
-
- /**
- * Returns the docvalues generation of this field, or -1 if there are
- * no updates to it.
- */
- public long getDocValuesGen(int fieldNumber) {
- Long gen = fieldDocValuesGens.get(fieldNumber);
- return gen == null ? -1 : gen.longValue();
- }
-
- /** Sets the docvalues generation for this field. */
- public void setDocValuesGen(int fieldNumber, long gen) {
- fieldDocValuesGens.put(fieldNumber, gen);
+ return fieldInfosGen != -1;
}
- /**
- * Returns a mapping from a field number to its DV generation.
- *
- * @see #getDocValuesGen(int)
- */
- public Map<Integer,Long> getFieldDocValuesGens() {
- return fieldDocValuesGens;
+ /** Returns the next available generation number of the FieldInfos files. */
+ public long getNextFieldInfosGen() {
+ return nextWriteFieldInfosGen;
}
/**
* Returns the generation number of the field infos file or -1 if there are no
* field updates yet.
*/
- public long getDocValuesGen() {
- return docValuesGen;
+ public long getFieldInfosGen() {
+ return fieldInfosGen;
}
/**
@@ -261,25 +232,24 @@ public class SegmentInfoPerCommit { // T
if (delGen != -1) {
s += ":delGen=" + delGen;
}
- if (docValuesGen != -1) {
- s += ":docValuesGen=" + docValuesGen;
+ if (fieldInfosGen != -1) {
+ s += ":fieldInfosGen=" + fieldInfosGen;
}
return s;
}
@Override
public SegmentInfoPerCommit clone() {
- SegmentInfoPerCommit other = new SegmentInfoPerCommit(info, delCount, delGen, docValuesGen);
+ SegmentInfoPerCommit other = new SegmentInfoPerCommit(info, delCount, delGen, fieldInfosGen);
// Not clear that we need to carry over nextWriteDelGen
// (i.e. do we ever clone after a failed write and
// before the next successful write?), but just do it to
// be safe:
other.nextWriteDelGen = nextWriteDelGen;
- other.nextWriteDocValuesGen = nextWriteDocValuesGen;
+ other.nextWriteFieldInfosGen = nextWriteFieldInfosGen;
other.updatesFiles.addAll(updatesFiles);
- other.fieldDocValuesGens.putAll(fieldDocValuesGens);
return other;
}
}
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java Sat Sep 28 06:19:00 2013
@@ -28,11 +28,11 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.codecs.FieldInfosFormat;
import org.apache.lucene.codecs.LiveDocsFormat;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.ChecksumIndexOutput;
@@ -70,7 +70,7 @@ import org.apache.lucene.util.IOUtils;
* <ul>
* <li><tt>segments.gen</tt>: GenHeader, Generation, Generation
* <li><tt>segments_N</tt>: Header, Version, NameCounter, SegCount,
- * <SegName, SegCodec, DelGen, DeletionCount><sup>SegCount</sup>,
+ * <SegName, SegCodec, DelGen, DeletionCount, FieldInfosGen, UpdatesFiles><sup>SegCount</sup>,
* CommitUserData, Checksum
* </ul>
* </p>
@@ -79,9 +79,10 @@ import org.apache.lucene.util.IOUtils;
* <ul>
* <li>Header --> {@link CodecUtil#writeHeader CodecHeader}</li>
* <li>GenHeader, NameCounter, SegCount, DeletionCount --> {@link DataOutput#writeInt Int32}</li>
- * <li>Generation, Version, DelGen, Checksum --> {@link DataOutput#writeLong Int64}</li>
+ * <li>Generation, Version, DelGen, Checksum, FieldInfosGen --> {@link DataOutput#writeLong Int64}</li>
* <li>SegName, SegCodec --> {@link DataOutput#writeString String}</li>
* <li>CommitUserData --> {@link DataOutput#writeStringStringMap Map<String,String>}</li>
+ * <li>UpdatesFiles --> {@link DataOutput#writeStringSet(Set) Set<String>}</li>
* </ul>
* </p>
* Field Descriptions:
@@ -104,6 +105,10 @@ import org.apache.lucene.util.IOUtils;
* <li>CommitUserData stores an optional user-supplied opaque
* Map<String,String> that was passed to
* {@link IndexWriter#setCommitData(java.util.Map)}.</li>
* <li>FieldInfosGen is the generation count of the fieldInfos file. If this is -1,
* there are no updates to the fieldInfos in that segment. Otherwise the value is
* positive (generations start at 1) and the updated fieldInfos are stored by
* {@link FieldInfosFormat}.</li>
+ * <li>UpdatesFiles stores the list of files that were updated in that segment.</li>
* </ul>
* </p>
*
@@ -111,11 +116,11 @@ import org.apache.lucene.util.IOUtils;
*/
public final class SegmentInfos implements Cloneable, Iterable<SegmentInfoPerCommit> {
- /** The file format version for the segments_N codec header, up to 4.4. */
+ /** The file format version for the segments_N codec header, up to 4.5. */
public static final int VERSION_40 = 0;
- /** The file format version for the segments_N codec header, since 4.5+. */
- public static final int VERSION_45 = 1;
+ /** The file format version for the segments_N codec header, since 4.6+. */
+ public static final int VERSION_46 = 1;
/** Used for the segments.gen file only!
* Whenever you add a new format, make it 1 smaller (negative version logic)! */
@@ -320,7 +325,7 @@ public final class SegmentInfos implemen
throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
}
// 4.0+
- int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_40, VERSION_45);
+ int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_40, VERSION_46);
version = input.readLong();
counter = input.readInt();
int numSegments = input.readInt();
@@ -338,16 +343,12 @@ public final class SegmentInfos implemen
if (delCount < 0 || delCount > info.getDocCount()) {
throw new CorruptIndexException("invalid deletion count: " + delCount + " (resource: " + input + ")");
}
- long docValuesGen = -1;
- if (format >= VERSION_45) {
- docValuesGen = input.readLong();
+ long fieldInfosGen = -1;
+ if (format >= VERSION_46) {
+ fieldInfosGen = input.readLong();
}
- SegmentInfoPerCommit siPerCommit = new SegmentInfoPerCommit(info, delCount, delGen, docValuesGen);
- if (format >= VERSION_45) {
- int numUpdates = input.readInt();
- for (int i = 0; i < numUpdates; i++) {
- siPerCommit.setDocValuesGen(input.readInt(), input.readLong());
- }
+ SegmentInfoPerCommit siPerCommit = new SegmentInfoPerCommit(info, delCount, delGen, fieldInfosGen);
+ if (format >= VERSION_46) {
siPerCommit.addUpdatesFiles(input.readStringSet());
}
add(siPerCommit);
@@ -408,7 +409,7 @@ public final class SegmentInfos implemen
try {
segnOutput = new ChecksumIndexOutput(directory.createOutput(segmentFileName, IOContext.DEFAULT));
- CodecUtil.writeHeader(segnOutput, "segments", VERSION_45);
+ CodecUtil.writeHeader(segnOutput, "segments", VERSION_46);
segnOutput.writeLong(version);
segnOutput.writeInt(counter); // write counter
segnOutput.writeInt(size()); // write infos
@@ -418,13 +419,7 @@ public final class SegmentInfos implemen
segnOutput.writeString(si.getCodec().getName());
segnOutput.writeLong(siPerCommit.getDelGen());
segnOutput.writeInt(siPerCommit.getDelCount());
- segnOutput.writeLong(siPerCommit.getDocValuesGen());
- Map<Integer,Long> docValuesUpdatesGen = siPerCommit.getFieldDocValuesGens();
- segnOutput.writeInt(docValuesUpdatesGen.size());
- for (Entry<Integer,Long> e : docValuesUpdatesGen.entrySet()) {
- segnOutput.writeInt(e.getKey());
- segnOutput.writeLong(e.getValue());
- }
+ segnOutput.writeLong(siPerCommit.getFieldInfosGen());
segnOutput.writeStringSet(siPerCommit.getUpdatesFiles());
assert si.dir == directory;
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java Sat Sep 28 06:19:00 2013
@@ -142,7 +142,7 @@ final class SegmentMerger {
// write the merged infos
FieldInfosWriter fieldInfosWriter = codec.fieldInfosFormat().getFieldInfosWriter();
- fieldInfosWriter.write(directory, mergeState.segmentInfo.name, mergeState.fieldInfos, context);
+ fieldInfosWriter.write(directory, mergeState.segmentInfo.name, "", mergeState.fieldInfos, context);
return mergeState;
}
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java Sat Sep 28 06:19:00 2013
@@ -31,6 +31,7 @@ import org.apache.lucene.codecs.StoredFi
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Bits;
@@ -73,6 +74,8 @@ public final class SegmentReader extends
final Map<String,DocValuesProducer> dvProducers = new HashMap<String,DocValuesProducer>();
final Map<Long,RefCount<DocValuesProducer>> genDVProducers = new HashMap<Long,RefCount<DocValuesProducer>>();
+ final FieldInfos fieldInfos;
+
/**
* Constructs a new SegmentReader with a new core.
* @throws CorruptIndexException if the index is corrupt
@@ -81,6 +84,13 @@ public final class SegmentReader extends
// TODO: why is this public?
public SegmentReader(SegmentInfoPerCommit si, IOContext context) throws IOException {
this.si = si;
+ // TODO if the segment uses CFS, we may open the CFS file twice: once for
+ // reading the FieldInfos (if they are not gen'd) and second time by
+ // SegmentCoreReaders. We can open the CFS here and pass to SCR, but then it
+ // results in less readable code (resource not closed where it was opened).
+ // Best if we could somehow read FieldInfos in SCR but not keep it there, but
+ // constructors don't allow returning two things...
+ fieldInfos = readFieldInfos(si);
core = new SegmentCoreReaders(this, si.info.dir, si, context);
boolean success = false;
@@ -95,7 +105,7 @@ public final class SegmentReader extends
}
numDocs = si.info.getDocCount() - si.getDelCount();
- if (core.fieldInfos.hasDocValues()) {
+ if (fieldInfos.hasDocValues()) {
final Directory dir = core.cfsReader != null ? core.cfsReader : si.info.dir;
final DocValuesFormat dvFormat = codec.docValuesFormat();
// initialize the per generation numericDVProducers and put the correct
@@ -157,8 +167,14 @@ public final class SegmentReader extends
// increment refCount of DocValuesProducers that are used by this reader
boolean success = false;
try {
- if (core.fieldInfos.hasDocValues()) {
- final Codec codec = si.info.getCodec();
+ final Codec codec = si.info.getCodec();
+ if (si.getFieldInfosGen() == -1) {
+ fieldInfos = sr.fieldInfos;
+ } else {
+ fieldInfos = readFieldInfos(si);
+ }
+
+ if (fieldInfos.hasDocValues()) {
final Directory dir = core.cfsReader != null ? core.cfsReader : si.info.dir;
final DocValuesFormat dvFormat = codec.docValuesFormat();
@@ -196,14 +212,45 @@ public final class SegmentReader extends
}
}
+ /**
+ * Reads the most recent {@link FieldInfos} of the given segment info.
+ *
+ * @lucene.internal
+ */
+ static FieldInfos readFieldInfos(SegmentInfoPerCommit info) throws IOException {
+ final Directory dir;
+ final boolean closeDir;
+ if (info.getFieldInfosGen() == -1 && info.info.getUseCompoundFile()) {
+ // no fieldInfos gen and segment uses a compound file
+ dir = new CompoundFileDirectory(info.info.dir,
+ IndexFileNames.segmentFileName(info.info.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION),
+ IOContext.READONCE,
+ false);
+ closeDir = true;
+ } else {
+ // gen'd FIS are read outside CFS, or the segment doesn't use a compound file
+ dir = info.info.dir;
+ closeDir = false;
+ }
+
+ try {
+ final String segmentSuffix = info.getFieldInfosGen() == -1 ? "" : Long.toString(info.getFieldInfosGen(), Character.MAX_RADIX);
+ return info.info.getCodec().fieldInfosFormat().getFieldInfosReader().read(dir, info.info.name, segmentSuffix, IOContext.READONCE);
+ } finally {
+ if (closeDir) {
+ dir.close();
+ }
+ }
+ }
+
// returns a gen->List<FieldInfo> mapping. Fields without DV updates have gen=-1
private Map<Long,List<FieldInfo>> getGenInfos(SegmentInfoPerCommit si) {
final Map<Long,List<FieldInfo>> genInfos = new HashMap<Long,List<FieldInfo>>();
- for (FieldInfo fi : core.fieldInfos) {
+ for (FieldInfo fi : fieldInfos) {
if (fi.getDocValuesType() == null) {
continue;
}
- long gen = si.getDocValuesGen(fi.number);
+ long gen = fi.getDocValuesGen();
List<FieldInfo> infos = genInfos.get(gen);
if (infos == null) {
infos = new ArrayList<FieldInfo>();
@@ -267,7 +314,7 @@ public final class SegmentReader extends
@Override
public FieldInfos getFieldInfos() {
ensureOpen();
- return core.fieldInfos;
+ return fieldInfos;
}
/** Expert: retrieve thread-private {@link
@@ -372,7 +419,7 @@ public final class SegmentReader extends
// null if the field does not exist, or is not indexed with the requested
// DocValuesType.
private FieldInfo getDVField(String field, DocValuesType type) {
- FieldInfo fi = core.fieldInfos.fieldInfo(field);
+ FieldInfo fi = fieldInfos.fieldInfo(field);
if (fi == null) {
// Field does not exist
return null;
@@ -414,7 +461,7 @@ public final class SegmentReader extends
@Override
public Bits getDocsWithField(String field) throws IOException {
ensureOpen();
- FieldInfo fi = core.fieldInfos.fieldInfo(field);
+ FieldInfo fi = fieldInfos.fieldInfo(field);
if (fi == null) {
// Field does not exist
return null;
@@ -507,7 +554,12 @@ public final class SegmentReader extends
@Override
public NumericDocValues getNormValues(String field) throws IOException {
ensureOpen();
- return core.getNormValues(field);
+ FieldInfo fi = fieldInfos.fieldInfo(field);
+ if (fi == null || !fi.hasNorms()) {
+ // Field does not exist or does not index norms
+ return null;
+ }
+ return core.getNormValues(fi);
}
/**
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java Sat Sep 28 06:19:00 2013
@@ -71,24 +71,20 @@ public class SegmentWriteState {
* to {@link Directory#createOutput(String,IOContext)}. */
public final IOContext context;
- /** True is this instance represents a field update. */
- public final boolean isFieldUpdate; // TODO (DVU_FIELDINFOS_GEN) once we gen FieldInfos, get rid of this
-
/** Sole constructor. */
public SegmentWriteState(InfoStream infoStream, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos,
BufferedDeletes segDeletes, IOContext context) {
- this(infoStream, directory, segmentInfo, fieldInfos, segDeletes, context, "", false);
+ this(infoStream, directory, segmentInfo, fieldInfos, segDeletes, context, "");
}
/**
- * Constructor which takes segment suffix and isFieldUpdate in addition to the
- * other parameters.
+ * Constructor which takes segment suffix.
*
* @see #SegmentWriteState(InfoStream, Directory, SegmentInfo, FieldInfos,
* BufferedDeletes, IOContext)
*/
public SegmentWriteState(InfoStream infoStream, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos,
- BufferedDeletes segDeletes, IOContext context, String segmentSuffix, boolean isFieldUpdate) {
+ BufferedDeletes segDeletes, IOContext context, String segmentSuffix) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
@@ -96,7 +92,6 @@ public class SegmentWriteState {
this.fieldInfos = fieldInfos;
this.segmentSuffix = segmentSuffix;
this.context = context;
- this.isFieldUpdate = isFieldUpdate;
}
/** Create a shallow copy of {@link SegmentWriteState} with a new segment suffix. */
@@ -109,6 +104,5 @@ public class SegmentWriteState {
this.segmentSuffix = segmentSuffix;
segDeletes = state.segDeletes;
delCountOnFlush = state.delCountOnFlush;
- isFieldUpdate = state.isFieldUpdate;
}
}
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java Sat Sep 28 06:19:00 2013
@@ -163,7 +163,7 @@ final class StandardDirectoryReader exte
newReaders[i] = newReader;
} else {
if (newReaders[i].getSegmentInfo().getDelGen() == infos.info(i).getDelGen()
- && newReaders[i].getSegmentInfo().getDocValuesGen() == infos.info(i).getDocValuesGen()) {
+ && newReaders[i].getSegmentInfo().getFieldInfosGen() == infos.info(i).getFieldInfosGen()) {
// No change; this reader will be shared between
// the old and the new one, so we must incRef
// it:
Modified: lucene/dev/trunk/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec (original)
+++ lucene/dev/trunk/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec Sat Sep 28 06:19:00 2013
@@ -16,4 +16,5 @@
org.apache.lucene.codecs.lucene40.Lucene40Codec
org.apache.lucene.codecs.lucene41.Lucene41Codec
org.apache.lucene.codecs.lucene42.Lucene42Codec
-org.apache.lucene.codecs.lucene45.Lucene45Codec
\ No newline at end of file
+org.apache.lucene.codecs.lucene45.Lucene45Codec
+org.apache.lucene.codecs.lucene46.Lucene46Codec
\ No newline at end of file
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java Sat Sep 28 06:19:00 2013
@@ -19,7 +19,7 @@ package org.apache.lucene;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
@@ -37,7 +37,7 @@ import org.apache.lucene.util.LuceneTest
public class TestExternalCodecs extends LuceneTestCase {
- private static final class CustomPerFieldCodec extends Lucene45Codec {
+ private static final class CustomPerFieldCodec extends Lucene46Codec {
private final PostingsFormat ramFormat = PostingsFormat.forName("RAMOnly");
private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene41");
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java Sat Sep 28 06:19:00 2013
@@ -25,7 +25,7 @@ import org.apache.lucene.analysis.Analyz
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -81,7 +81,7 @@ public class TestPerFieldDocValuesFormat
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
final DocValuesFormat fast = DocValuesFormat.forName("Lucene45");
final DocValuesFormat slow = DocValuesFormat.forName("SimpleText");
- iwc.setCodec(new Lucene45Codec() {
+ iwc.setCodec(new Lucene46Codec() {
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
if ("dv1".equals(field)) {
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java Sat Sep 28 06:19:00 2013
@@ -21,8 +21,8 @@ import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
import org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.codecs.mocksep.MockSepPostingsFormat;
import org.apache.lucene.codecs.pulsing.Pulsing41PostingsFormat;
import org.apache.lucene.codecs.simpletext.SimpleTextPostingsFormat;
@@ -34,10 +34,10 @@ import org.apache.lucene.index.Directory
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
@@ -200,7 +200,7 @@ public class TestPerFieldPostingsFormat2
}
- public static class MockCodec extends Lucene45Codec {
+ public static class MockCodec extends Lucene46Codec {
final PostingsFormat lucene40 = new Lucene41PostingsFormat();
final PostingsFormat simpleText = new SimpleTextPostingsFormat();
final PostingsFormat mockSep = new MockSepPostingsFormat();
@@ -217,7 +217,7 @@ public class TestPerFieldPostingsFormat2
}
}
- public static class MockCodec2 extends Lucene45Codec {
+ public static class MockCodec2 extends Lucene46Codec {
final PostingsFormat lucene40 = new Lucene41PostingsFormat();
final PostingsFormat simpleText = new SimpleTextPostingsFormat();
@@ -268,7 +268,7 @@ public class TestPerFieldPostingsFormat2
}
public void testSameCodecDifferentInstance() throws Exception {
- Codec codec = new Lucene45Codec() {
+ Codec codec = new Lucene46Codec() {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
if ("id".equals(field)) {
@@ -284,7 +284,7 @@ public class TestPerFieldPostingsFormat2
}
public void testSameCodecDifferentParams() throws Exception {
- Codec codec = new Lucene45Codec() {
+ Codec codec = new Lucene46Codec() {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
if ("id".equals(field)) {
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java Sat Sep 28 06:19:00 2013
@@ -28,7 +28,7 @@ import org.apache.lucene.analysis.MockAn
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FilterCodec;
import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.codecs.pulsing.Pulsing41PostingsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -43,7 +43,6 @@ import org.apache.lucene.store.BaseDirec
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@@ -1060,7 +1059,7 @@ public class TestAddIndexes extends Luce
aux2.close();
}
- private static final class CustomPerFieldCodec extends Lucene45Codec {
+ private static final class CustomPerFieldCodec extends Lucene46Codec {
private final PostingsFormat simpleTextFormat = PostingsFormat.forName("SimpleText");
private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene41");
private final PostingsFormat mockSepFormat = PostingsFormat.forName("MockSep");
@@ -1111,7 +1110,7 @@ public class TestAddIndexes extends Luce
private static final class UnRegisteredCodec extends FilterCodec {
public UnRegisteredCodec() {
- super("NotRegistered", new Lucene45Codec());
+ super("NotRegistered", new Lucene46Codec());
}
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java Sat Sep 28 06:19:00 2013
@@ -21,12 +21,9 @@ import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
@@ -41,7 +38,7 @@ public class TestAllFilesHaveCodecHeader
public void test() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
- conf.setCodec(new Lucene45Codec());
+ conf.setCodec(new Lucene46Codec());
// riw should sometimes create docvalues fields, etc
RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
Document doc = new Document();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Sat Sep 28 06:19:00 2013
@@ -37,8 +37,8 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.IntField;
-import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.LongField;
+import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
@@ -54,19 +54,18 @@ import org.apache.lucene.search.TermQuer
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants;
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util._TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.junit.Ignore;
/*
Verify we can read the pre-5.0 file format, do searches
@@ -77,7 +76,7 @@ import org.junit.Ignore;
// we won't even be running the actual code, only the impostor
// @SuppressCodecs("Lucene4x")
// Sep codec cannot yet handle the offsets in our 4.x index!
-@SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Lucene42"})
+@SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Lucene42", "Lucene45"})
public class TestBackwardsCompatibility extends LuceneTestCase {
// Uncomment these cases & run them on an older Lucene version,
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java Sat Sep 28 06:19:00 2013
@@ -29,7 +29,6 @@ import org.apache.lucene.document.TextFi
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FailOnNonBulkMergesInfoStream;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
import org.junit.Test;
public class TestConsistentFieldNumbers extends LuceneTestCase {
@@ -67,8 +66,8 @@ public class TestConsistentFieldNumbers
sis.read(dir);
assertEquals(2, sis.size());
- FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
- FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
+ FieldInfos fis1 = SegmentReader.readFieldInfos(sis.info(0));
+ FieldInfos fis2 = SegmentReader.readFieldInfos(sis.info(1));
assertEquals("f1", fis1.fieldInfo(0).name);
assertEquals("f2", fis1.fieldInfo(1).name);
@@ -85,7 +84,7 @@ public class TestConsistentFieldNumbers
sis.read(dir);
assertEquals(1, sis.size());
- FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(0).info);
+ FieldInfos fis3 = SegmentReader.readFieldInfos(sis.info(0));
assertEquals("f1", fis3.fieldInfo(0).name);
assertEquals("f2", fis3.fieldInfo(1).name);
@@ -130,8 +129,8 @@ public class TestConsistentFieldNumbers
sis.read(dir1);
assertEquals(2, sis.size());
- FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
- FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
+ FieldInfos fis1 = SegmentReader.readFieldInfos(sis.info(0));
+ FieldInfos fis2 = SegmentReader.readFieldInfos(sis.info(1));
assertEquals("f1", fis1.fieldInfo(0).name);
assertEquals("f2", fis1.fieldInfo(1).name);
@@ -161,7 +160,7 @@ public class TestConsistentFieldNumbers
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(1, sis.size());
- FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
+ FieldInfos fis1 = SegmentReader.readFieldInfos(sis.info(0));
assertEquals("f1", fis1.fieldInfo(0).name);
assertEquals("f2", fis1.fieldInfo(1).name);
}
@@ -180,8 +179,8 @@ public class TestConsistentFieldNumbers
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(2, sis.size());
- FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
- FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
+ FieldInfos fis1 = SegmentReader.readFieldInfos(sis.info(0));
+ FieldInfos fis2 = SegmentReader.readFieldInfos(sis.info(1));
assertEquals("f1", fis1.fieldInfo(0).name);
assertEquals("f2", fis1.fieldInfo(1).name);
assertEquals("f1", fis2.fieldInfo(0).name);
@@ -203,9 +202,9 @@ public class TestConsistentFieldNumbers
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(3, sis.size());
- FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
- FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
- FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(2).info);
+ FieldInfos fis1 = SegmentReader.readFieldInfos(sis.info(0));
+ FieldInfos fis2 = SegmentReader.readFieldInfos(sis.info(1));
+ FieldInfos fis3 = SegmentReader.readFieldInfos(sis.info(2));
assertEquals("f1", fis1.fieldInfo(0).name);
assertEquals("f2", fis1.fieldInfo(1).name);
assertEquals("f1", fis2.fieldInfo(0).name);
@@ -237,7 +236,7 @@ public class TestConsistentFieldNumbers
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(1, sis.size());
- FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
+ FieldInfos fis1 = SegmentReader.readFieldInfos(sis.info(0));
assertEquals("f1", fis1.fieldInfo(0).name);
assertEquals("f2", fis1.fieldInfo(1).name);
assertEquals("f3", fis1.fieldInfo(2).name);
@@ -275,7 +274,7 @@ public class TestConsistentFieldNumbers
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
for (SegmentInfoPerCommit si : sis) {
- FieldInfos fis = _TestUtil.getFieldInfos(si.info);
+ FieldInfos fis = SegmentReader.readFieldInfos(si);
for (FieldInfo fi : fis) {
Field expected = getField(Integer.parseInt(fi.name));
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java Sat Sep 28 06:19:00 2013
@@ -50,7 +50,7 @@ public class TestDuelingCodecs extends L
public void setUp() throws Exception {
super.setUp();
- // for now its SimpleText vs Lucene45(random postings format)
+ // for now its SimpleText vs Lucene46(random postings format)
// as this gives the best overall coverage. when we have more
// codecs we should probably pick 2 from Codec.availableCodecs()
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java Sat Sep 28 06:19:00 2013
@@ -17,19 +17,16 @@ package org.apache.lucene.index;
* limitations under the License.
*/
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
+import java.io.IOException;
+
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FieldInfosReader;
import org.apache.lucene.codecs.FieldInfosWriter;
import org.apache.lucene.document.Document;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
-
-import java.io.IOException;
-import java.util.Arrays;
+import org.apache.lucene.util.LuceneTestCase;
//import org.cnlp.utils.properties.ResourceBundleHelper;
@@ -60,14 +57,14 @@ public class TestFieldInfos extends Luce
//Use a RAMOutputStream
FieldInfosWriter writer = Codec.getDefault().fieldInfosFormat().getFieldInfosWriter();
- writer.write(dir, filename, fieldInfos, IOContext.DEFAULT);
+ writer.write(dir, filename, "", fieldInfos, IOContext.DEFAULT);
output.close();
return fieldInfos;
}
public FieldInfos readFieldInfos(Directory dir, String filename) throws IOException {
FieldInfosReader reader = Codec.getDefault().fieldInfosFormat().getFieldInfosReader();
- return reader.read(dir, filename, IOContext.DEFAULT);
+ return reader.read(dir, filename, "", IOContext.DEFAULT);
}
public void test() throws IOException {
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java Sat Sep 28 06:19:00 2013
@@ -1,7 +1,9 @@
package org.apache.lucene.index;
import java.io.IOException;
+import java.util.HashSet;
import java.util.Random;
+import java.util.Set;
import java.util.concurrent.CountDownLatch;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -11,8 +13,9 @@ import org.apache.lucene.codecs.assertin
import org.apache.lucene.codecs.lucene40.Lucene40RWCodec;
import org.apache.lucene.codecs.lucene41.Lucene41RWCodec;
import org.apache.lucene.codecs.lucene42.Lucene42RWCodec;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
import org.apache.lucene.codecs.lucene45.Lucene45DocValuesFormat;
+import org.apache.lucene.codecs.lucene45.Lucene45RWCodec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
@@ -25,9 +28,12 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.junit.Test;
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -45,7 +51,7 @@ import org.junit.Test;
* limitations under the License.
*/
-@SuppressCodecs({"Lucene40","Lucene41","Lucene42"})
+@SuppressCodecs({"Lucene40","Lucene41","Lucene42","Lucene45"})
public class TestNumericDocValuesUpdates extends LuceneTestCase {
private Document doc(int id) {
@@ -154,7 +160,7 @@ public class TestNumericDocValuesUpdates
writer.commit();
reader1 = DirectoryReader.open(dir);
}
-
+
// update doc
writer.updateNumericDocValue(new Term("id", "doc-0"), "val", 10L); // update doc-0's value to 10
if (!isNRT) {
@@ -165,7 +171,7 @@ public class TestNumericDocValuesUpdates
final DirectoryReader reader2 = DirectoryReader.openIfChanged(reader1);
assertNotNull(reader2);
assertTrue(reader1 != reader2);
-
+
assertEquals(1, reader1.leaves().get(0).reader().getNumericDocValues("val").get(0));
assertEquals(10, reader2.leaves().get(0).reader().getNumericDocValues("val").get(0));
@@ -517,7 +523,7 @@ public class TestNumericDocValuesUpdates
public void testDifferentDVFormatPerField() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
- conf.setCodec(new Lucene45Codec() {
+ conf.setCodec(new Lucene46Codec() {
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
return new Lucene45DocValuesFormat();
@@ -792,14 +798,18 @@ public class TestNumericDocValuesUpdates
// update document in the second segment
writer.updateNumericDocValue(new Term("id", "doc1"), "ndv", 5L);
- try {
- writer.close();
- fail("should not have succeeded updating a segment with no numeric DocValues field");
- } catch (UnsupportedOperationException e) {
- // expected
- writer.rollback();
+ writer.close();
+
+ DirectoryReader reader = DirectoryReader.open(dir);
+ for (AtomicReaderContext context : reader.leaves()) {
+ AtomicReader r = context.reader();
+ NumericDocValues ndv = r.getNumericDocValues("ndv");
+ for (int i = 0; i < r.maxDoc(); i++) {
+ assertEquals(5L, ndv.get(i));
+ }
}
-
+ reader.close();
+
dir.close();
}
@@ -828,15 +838,19 @@ public class TestNumericDocValuesUpdates
writer.addDocument(doc);
writer.commit();
- // update documentin the second segment
+ // update document in the second segment
writer.updateNumericDocValue(new Term("id", "doc1"), "ndv", 5L);
- try {
- writer.close();
- fail("should not have succeeded updating a segment with no numeric DocValues field");
- } catch (UnsupportedOperationException e) {
- // expected
- writer.rollback();
+ writer.close();
+
+ DirectoryReader reader = DirectoryReader.open(dir);
+ for (AtomicReaderContext context : reader.leaves()) {
+ AtomicReader r = context.reader();
+ NumericDocValues ndv = r.getNumericDocValues("ndv");
+ for (int i = 0; i < r.maxDoc(); i++) {
+ assertEquals(5L, ndv.get(i));
+ }
}
+ reader.close();
dir.close();
}
@@ -867,7 +881,7 @@ public class TestNumericDocValuesUpdates
@Test
public void testUpdateOldSegments() throws Exception {
- Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec() };
+ Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec(), new Lucene45RWCodec() };
Directory dir = newDirectory();
// create a segment with an old Codec
@@ -1038,7 +1052,7 @@ public class TestNumericDocValuesUpdates
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
- conf.setCodec(new Lucene45Codec() {
+ conf.setCodec(new Lucene46Codec() {
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
return new Lucene45DocValuesFormat();
@@ -1053,7 +1067,7 @@ public class TestNumericDocValuesUpdates
writer.close();
// change format
- conf.setCodec(new Lucene45Codec() {
+ conf.setCodec(new Lucene46Codec() {
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
return new AssertingDocValuesFormat();
@@ -1080,4 +1094,63 @@ public class TestNumericDocValuesUpdates
dir.close();
}
+ @Test
+ public void testAddIndexes() throws Exception {
+ Directory dir1 = newDirectory();
+ IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+ IndexWriter writer = new IndexWriter(dir1, conf);
+
+ final int numDocs = atLeast(50);
+ final int numTerms = _TestUtil.nextInt(random(), 1, numDocs / 5);
+ Set<String> randomTerms = new HashSet<String>();
+ while (randomTerms.size() < numTerms) {
+ randomTerms.add(_TestUtil.randomSimpleString(random()));
+ }
+
+ // create first index
+ for (int i = 0; i < numDocs; i++) {
+ Document doc = new Document();
+ doc.add(new StringField("id", RandomPicks.randomFrom(random(), randomTerms), Store.NO));
+ doc.add(new NumericDocValuesField("ndv", 4L));
+ doc.add(new NumericDocValuesField("control", 8L));
+ writer.addDocument(doc);
+ }
+
+ if (random().nextBoolean()) {
+ writer.commit();
+ }
+
+ // update some docs to a random value
+ long value = random().nextInt();
+ Term term = new Term("id", RandomPicks.randomFrom(random(), randomTerms));
+ writer.updateNumericDocValue(term, "ndv", value);
+ writer.updateNumericDocValue(term, "control", value * 2);
+ writer.close();
+
+ Directory dir2 = newDirectory();
+ conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+ writer = new IndexWriter(dir2, conf);
+ if (random().nextBoolean()) {
+ writer.addIndexes(dir1);
+ } else {
+ DirectoryReader reader = DirectoryReader.open(dir1);
+ writer.addIndexes(reader);
+ reader.close();
+ }
+ writer.close();
+
+ DirectoryReader reader = DirectoryReader.open(dir2);
+ for (AtomicReaderContext context : reader.leaves()) {
+ AtomicReader r = context.reader();
+ NumericDocValues ndv = r.getNumericDocValues("ndv");
+ NumericDocValues control = r.getNumericDocValues("control");
+ for (int i = 0; i < r.maxDoc(); i++) {
+ assertEquals(ndv.get(i)*2, control.get(i));
+ }
+ }
+ reader.close();
+
+ IOUtils.close(dir1, dir2);
+ }
+
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java Sat Sep 28 06:19:00 2013
@@ -127,7 +127,7 @@ public class TestTermVectorsReader exten
seg = writer.newestSegment();
writer.close();
- fieldInfos = _TestUtil.getFieldInfos(seg.info);
+ fieldInfos = SegmentReader.readFieldInfos(seg);
}
@Override
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNamedSPILoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNamedSPILoader.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNamedSPILoader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/util/TestNamedSPILoader.java Sat Sep 28 06:19:00 2013
@@ -24,21 +24,22 @@ import org.apache.lucene.codecs.Codec;
// TODO: maybe we should test this with mocks, but its easy
// enough to test the basics via Codec
public class TestNamedSPILoader extends LuceneTestCase {
+
public void testLookup() {
- Codec codec = Codec.forName("Lucene45");
- assertEquals("Lucene45", codec.getName());
+ Codec codec = Codec.forName("Lucene46");
+ assertEquals("Lucene46", codec.getName());
}
// we want an exception if its not found.
public void testBogusLookup() {
try {
- Codec codec = Codec.forName("dskfdskfsdfksdfdsf");
+ Codec.forName("dskfdskfsdfksdfdsf");
fail();
} catch (IllegalArgumentException expected) {}
}
public void testAvailableServices() {
Set<String> codecs = Codec.availableCodecs();
- assertTrue(codecs.contains("Lucene45"));
+ assertTrue(codecs.contains("Lucene46"));
}
}
Added: lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/Facet46Codec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/Facet46Codec.java?rev=1527154&view=auto
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/Facet46Codec.java (added)
+++ lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/Facet46Codec.java Sat Sep 28 06:19:00 2013
@@ -0,0 +1,79 @@
+package org.apache.lucene.facet.codecs.facet46;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
+import org.apache.lucene.facet.codecs.facet42.Facet42DocValuesFormat;
+import org.apache.lucene.facet.params.CategoryListParams;
+import org.apache.lucene.facet.params.FacetIndexingParams;
+
+/**
+ * Same as {@link Lucene46Codec} except it uses {@link Facet42DocValuesFormat}
+ * for facet fields (faster-but-more-RAM-consuming doc values).
+ *
+ * <p>
+ * <b>NOTE</b>: this codec does not support facet partitions (see
+ * {@link FacetIndexingParams#getPartitionSize()}).
+ *
+ * <p>
+ * <b>NOTE</b>: this format cannot handle more than 2 GB
+ * of facet data in a single segment. If your usage may hit
+ * this limit, you can either use Lucene's default
+ * DocValuesFormat, limit the maximum segment size in your
+ * MergePolicy, or send us a patch fixing the limitation.
+ *
+ * @lucene.experimental
+ */
+public class Facet46Codec extends Lucene46Codec {
+
+ private final Set<String> facetFields;
+ private final DocValuesFormat facetsDVFormat = DocValuesFormat.forName("Facet42");
+
+ /** Default constructor, uses {@link FacetIndexingParams#DEFAULT}. */
+ public Facet46Codec() {
+ this(FacetIndexingParams.DEFAULT);
+ }
+
+ /**
+ * Initializes with the given {@link FacetIndexingParams}. Returns the proper
+ * {@link DocValuesFormat} for the fields that are returned by
+ * {@link FacetIndexingParams#getAllCategoryListParams()}.
+ */
+ public Facet46Codec(FacetIndexingParams fip) {
+ if (fip.getPartitionSize() != Integer.MAX_VALUE) {
+ throw new IllegalArgumentException("this Codec does not support partitions");
+ }
+ this.facetFields = new HashSet<String>();
+ for (CategoryListParams clp : fip.getAllCategoryListParams()) {
+ facetFields.add(clp.field);
+ }
+ }
+
+ @Override
+ public DocValuesFormat getDocValuesFormatForField(String field) {
+ if (facetFields.contains(field)) {
+ return facetsDVFormat;
+ } else {
+ return super.getDocValuesFormatForField(field);
+ }
+ }
+}
Added: lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/package.html
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/package.html?rev=1527154&view=auto
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/package.html (added)
+++ lucene/dev/trunk/lucene/facet/src/java/org/apache/lucene/facet/codecs/facet46/package.html Sat Sep 28 06:19:00 2013
@@ -0,0 +1,22 @@
+<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<body>
+Codec + DocValuesFormat that are optimized for facets.
+</body>
+</html>
Modified: lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java (original)
+++ lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java Sat Sep 28 06:19:00 2013
@@ -3,7 +3,7 @@ package org.apache.lucene.facet;
import java.util.Random;
import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.facet.codecs.facet45.Facet45Codec;
+import org.apache.lucene.facet.codecs.facet46.Facet46Codec;
import org.apache.lucene.facet.encoding.DGapIntEncoder;
import org.apache.lucene.facet.encoding.DGapVInt8IntEncoder;
import org.apache.lucene.facet.encoding.EightFlagsIntEncoder;
@@ -53,7 +53,7 @@ public abstract class FacetTestCase exte
public static void beforeClassFacetTestCase() throws Exception {
if (random().nextDouble() < 0.3) {
savedDefault = Codec.getDefault(); // save to restore later
- Codec.setDefault(new Facet45Codec());
+ Codec.setDefault(new Facet46Codec());
}
}
Modified: lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java (original)
+++ lucene/dev/trunk/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java Sat Sep 28 06:19:00 2013
@@ -31,7 +31,7 @@ import org.apache.lucene.document.Docume
import org.apache.lucene.document.Field;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.FacetTestUtils;
-import org.apache.lucene.facet.codecs.facet45.Facet45Codec;
+import org.apache.lucene.facet.codecs.facet46.Facet46Codec;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
@@ -260,7 +260,7 @@ public class TestDemoFacets extends Face
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
- iwc.setCodec(new Facet45Codec());
+ iwc.setCodec(new Facet46Codec());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
Modified: lucene/dev/trunk/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java (original)
+++ lucene/dev/trunk/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java Sat Sep 28 06:19:00 2013
@@ -141,7 +141,7 @@ public class IndexSplitter {
SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.name, info.getDocCount(),
info.getUseCompoundFile(),
info.getCodec(), info.getDiagnostics(), info.attributes());
- destInfos.add(new SegmentInfoPerCommit(newInfo, infoPerCommit.getDelCount(), infoPerCommit.getDelGen(), infoPerCommit.getDocValuesGen()));
+ destInfos.add(new SegmentInfoPerCommit(newInfo, infoPerCommit.getDelCount(), infoPerCommit.getDelGen(), infoPerCommit.getFieldInfosGen()));
// now copy files over
Collection<String> files = infoPerCommit.files();
for (final String srcName : files) {
Modified: lucene/dev/trunk/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java (original)
+++ lucene/dev/trunk/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java Sat Sep 28 06:19:00 2013
@@ -34,7 +34,7 @@ import org.apache.lucene.analysis.TokenS
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -164,7 +164,7 @@ public class AnalyzingInfixSuggester ext
* codec to use. */
protected IndexWriterConfig getIndexWriterConfig(Version matchVersion, Analyzer indexAnalyzer) {
IndexWriterConfig iwc = new IndexWriterConfig(matchVersion, indexAnalyzer);
- iwc.setCodec(new Lucene45Codec());
+ iwc.setCodec(new Lucene46Codec());
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
return iwc;
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java Sat Sep 28 06:19:00 2013
@@ -23,10 +23,10 @@ import org.apache.lucene.codecs.NormsFor
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.StoredFieldsFormat;
import org.apache.lucene.codecs.TermVectorsFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
/**
- * Acts like {@link Lucene45Codec} but with additional asserts.
+ * Acts like {@link Lucene46Codec} but with additional asserts.
*/
public final class AssertingCodec extends FilterCodec {
@@ -37,7 +37,7 @@ public final class AssertingCodec extend
private final NormsFormat norms = new AssertingNormsFormat();
public AssertingCodec() {
- super("Asserting", new Lucene45Codec());
+ super("Asserting", new Lucene46Codec());
}
@Override
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java Sat Sep 28 06:19:00 2013
@@ -28,7 +28,7 @@ import org.apache.lucene.codecs.diskdv.D
import org.apache.lucene.codecs.lucene40.Lucene40StoredFieldsFormat;
import org.apache.lucene.codecs.lucene40.Lucene40TermVectorsFormat;
import org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
/** Codec that tries to use as little ram as possible because he spent all his money on beer */
// TODO: better name :)
@@ -45,9 +45,10 @@ public class CheapBastardCodec extends F
private final NormsFormat norms = new DiskNormsFormat();
public CheapBastardCodec() {
- super("CheapBastard", new Lucene45Codec());
+ super("CheapBastard", new Lucene46Codec());
}
+ @Override
public PostingsFormat postingsFormat() {
return postings;
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java Sat Sep 28 06:19:00 2013
@@ -23,13 +23,13 @@ import org.apache.lucene.codecs.FilterCo
import org.apache.lucene.codecs.StoredFieldsFormat;
import org.apache.lucene.codecs.TermVectorsFormat;
import org.apache.lucene.codecs.compressing.dummy.DummyCompressingCodec;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
/**
* A codec that uses {@link CompressingStoredFieldsFormat} for its stored
- * fields and delegates to {@link Lucene45Codec} for everything else.
+ * fields and delegates to {@link Lucene46Codec} for everything else.
*/
public abstract class CompressingCodec extends FilterCodec {
@@ -73,7 +73,7 @@ public abstract class CompressingCodec e
* Creates a compressing codec with a given segment suffix
*/
public CompressingCodec(String name, String segmentSuffix, CompressionMode compressionMode, int chunkSize) {
- super(name, new Lucene45Codec());
+ super(name, new Lucene46Codec());
this.storedFieldsFormat = new CompressingStoredFieldsFormat(name, segmentSuffix, compressionMode, chunkSize);
this.termVectorsFormat = new CompressingTermVectorsFormat(name, segmentSuffix, compressionMode, chunkSize);
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosWriter.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosWriter.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosWriter.java Sat Sep 28 06:19:00 2013
@@ -37,6 +37,7 @@ import org.apache.lucene.util.IOUtils;
* @see Lucene40FieldInfosFormat
* @lucene.experimental
*/
+@Deprecated
public class Lucene40FieldInfosWriter extends FieldInfosWriter {
/** Sole constructor. */
@@ -44,7 +45,7 @@ public class Lucene40FieldInfosWriter ex
}
@Override
- public void write(Directory directory, String segmentName, FieldInfos infos, IOContext context) throws IOException {
+ public void write(Directory directory, String segmentName, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
final String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene40FieldInfosFormat.FIELD_INFOS_EXTENSION);
IndexOutput output = directory.createOutput(fileName, context);
boolean success = false;
Copied: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java (from r1527153, lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java)
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java?p2=lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java&p1=lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java&r1=1527153&r2=1527154&rev=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosWriter.java Sat Sep 28 06:19:00 2013
@@ -37,14 +37,15 @@ import org.apache.lucene.util.IOUtils;
* @see Lucene42FieldInfosFormat
* @lucene.experimental
*/
-final class Lucene42FieldInfosWriter extends FieldInfosWriter {
+@Deprecated
+public final class Lucene42FieldInfosWriter extends FieldInfosWriter {
/** Sole constructor. */
public Lucene42FieldInfosWriter() {
}
@Override
- public void write(Directory directory, String segmentName, FieldInfos infos, IOContext context) throws IOException {
+ public void write(Directory directory, String segmentName, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
final String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
IndexOutput output = directory.createOutput(fileName, context);
boolean success = false;
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42RWCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42RWCodec.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42RWCodec.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene42/Lucene42RWCodec.java Sat Sep 28 06:19:00 2013
@@ -17,12 +17,18 @@ package org.apache.lucene.codecs.lucene4
* limitations under the License.
*/
+import java.io.IOException;
+
import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.FieldInfosWriter;
import org.apache.lucene.codecs.NormsFormat;
+import org.apache.lucene.util.LuceneTestCase;
/**
* Read-write version of {@link Lucene42Codec} for testing.
*/
+@SuppressWarnings("deprecation")
public class Lucene42RWCodec extends Lucene42Codec {
private static final DocValuesFormat dv = new Lucene42RWDocValuesFormat();
private static final NormsFormat norms = new Lucene42NormsFormat();
@@ -36,4 +42,19 @@ public class Lucene42RWCodec extends Luc
public NormsFormat normsFormat() {
return norms;
}
+
+ @Override
+ public FieldInfosFormat fieldInfosFormat() {
+ return new Lucene42FieldInfosFormat() {
+ @Override
+ public FieldInfosWriter getFieldInfosWriter() throws IOException {
+ if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
+ return super.getFieldInfosWriter();
+ } else {
+ return new Lucene42FieldInfosWriter();
+ }
+ }
+ };
+ }
+
}
Added: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/Lucene45RWCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/Lucene45RWCodec.java?rev=1527154&view=auto
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/Lucene45RWCodec.java (added)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/Lucene45RWCodec.java Sat Sep 28 06:19:00 2013
@@ -0,0 +1,48 @@
+package org.apache.lucene.codecs.lucene45;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.FieldInfosWriter;
+import org.apache.lucene.codecs.lucene42.Lucene42FieldInfosFormat;
+import org.apache.lucene.codecs.lucene42.Lucene42FieldInfosWriter;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Read-write version of {@link Lucene45Codec} for testing.
+ */
+@SuppressWarnings("deprecation")
+public class Lucene45RWCodec extends Lucene45Codec {
+
+ @Override
+ public FieldInfosFormat fieldInfosFormat() {
+ return new Lucene42FieldInfosFormat() {
+ @Override
+ public FieldInfosWriter getFieldInfosWriter() throws IOException {
+ if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
+ return super.getFieldInfosWriter();
+ } else {
+ return new Lucene42FieldInfosWriter();
+ }
+ }
+ };
+ }
+
+}
Added: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/package.html
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/package.html?rev=1527154&view=auto
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/package.html (added)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene45/package.html Sat Sep 28 06:19:00 2013
@@ -0,0 +1,25 @@
+<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+</head>
+<body>
+Support for testing {@link org.apache.lucene.codecs.lucene45.Lucene45Codec}.
+</body>
+</html>
\ No newline at end of file
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java Sat Sep 28 06:19:00 2013
@@ -40,7 +40,7 @@ import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -1386,7 +1386,7 @@ public abstract class BasePostingsFormat
// TODO: would be better to use / delegate to the current
// Codec returned by getCodec()
- iwc.setCodec(new Lucene45Codec() {
+ iwc.setCodec(new Lucene46Codec() {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java Sat Sep 28 06:19:00 2013
@@ -31,21 +31,20 @@ import java.util.concurrent.atomic.Atomi
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.StoredFieldsFormat;
-import org.apache.lucene.codecs.compressing.CompressingCodec;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.NumericRangeQuery;
@@ -59,7 +58,6 @@ import org.apache.lucene.store.MockDirec
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
-import org.apache.lucene.util.LuceneTestCase.Nightly;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
@@ -78,6 +76,7 @@ public abstract class BaseStoredFieldsFo
*/
protected abstract Codec getCodec();
+ @Override
public void setUp() throws Exception {
super.setUp();
// set the default codec, so adding test cases to this isn't fragile
@@ -85,6 +84,7 @@ public abstract class BaseStoredFieldsFo
Codec.setDefault(getCodec());
}
+ @Override
public void tearDown() throws Exception {
Codec.setDefault(savedCodec); // restore
super.tearDown();
@@ -502,7 +502,7 @@ public abstract class BaseStoredFieldsFo
// get another codec, other than the default: so we are merging segments across different codecs
final Codec otherCodec;
if ("SimpleText".equals(Codec.getDefault().getName())) {
- otherCodec = new Lucene45Codec();
+ otherCodec = new Lucene46Codec();
} else {
otherCodec = new SimpleTextCodec();
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java?rev=1527154&r1=1527153&r2=1527154&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java Sat Sep 28 06:19:00 2013
@@ -27,19 +27,23 @@ import java.util.Map;
import java.util.Random;
import java.util.Set;
-import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.asserting.AssertingDocValuesFormat;
import org.apache.lucene.codecs.asserting.AssertingPostingsFormat;
+import org.apache.lucene.codecs.bloom.TestBloomFilteredLucene41Postings;
+import org.apache.lucene.codecs.diskdv.DiskDocValuesFormat;
import org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat;
import org.apache.lucene.codecs.lucene41ords.Lucene41WithOrds;
import org.apache.lucene.codecs.lucene41vargap.Lucene41VarGapDocFreqInterval;
import org.apache.lucene.codecs.lucene41vargap.Lucene41VarGapFixedInterval;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
import org.apache.lucene.codecs.lucene45.Lucene45DocValuesFormat;
-import org.apache.lucene.codecs.bloom.TestBloomFilteredLucene41Postings;
-import org.apache.lucene.codecs.diskdv.DiskDocValuesFormat;
+import org.apache.lucene.codecs.lucene46.Lucene46Codec;
import org.apache.lucene.codecs.memory.DirectPostingsFormat;
+import org.apache.lucene.codecs.memory.FSTOrdPostingsFormat;
+import org.apache.lucene.codecs.memory.FSTOrdPulsing41PostingsFormat;
+import org.apache.lucene.codecs.memory.FSTPostingsFormat;
+import org.apache.lucene.codecs.memory.FSTPulsing41PostingsFormat;
import org.apache.lucene.codecs.memory.MemoryDocValuesFormat;
import org.apache.lucene.codecs.memory.MemoryPostingsFormat;
import org.apache.lucene.codecs.mockintblock.MockFixedIntBlockPostingsFormat;
@@ -48,12 +52,8 @@ import org.apache.lucene.codecs.mockrand
import org.apache.lucene.codecs.mocksep.MockSepPostingsFormat;
import org.apache.lucene.codecs.nestedpulsing.NestedPulsingPostingsFormat;
import org.apache.lucene.codecs.pulsing.Pulsing41PostingsFormat;
-import org.apache.lucene.codecs.simpletext.SimpleTextPostingsFormat;
import org.apache.lucene.codecs.simpletext.SimpleTextDocValuesFormat;
-import org.apache.lucene.codecs.memory.FSTOrdPostingsFormat;
-import org.apache.lucene.codecs.memory.FSTOrdPulsing41PostingsFormat;
-import org.apache.lucene.codecs.memory.FSTPostingsFormat;
-import org.apache.lucene.codecs.memory.FSTPulsing41PostingsFormat;
+import org.apache.lucene.codecs.simpletext.SimpleTextPostingsFormat;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@@ -66,7 +66,7 @@ import org.apache.lucene.util._TestUtil;
* documents in different orders and the test will still be deterministic
* and reproducable.
*/
-public class RandomCodec extends Lucene45Codec {
+public class RandomCodec extends Lucene46Codec {
/** Shuffled list of postings formats to use for new mappings */
private List<PostingsFormat> formats = new ArrayList<PostingsFormat>();