You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by dw...@apache.org on 2012/04/15 16:42:01 UTC
svn commit: r1326351 [5/22] - in /lucene/dev/trunk: ./ dev-tools/eclipse/
lucene/
lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/
lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/custom/
lucene/contrib/high...
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java Sun Apr 15 14:41:44 2012
@@ -18,16 +18,9 @@ package org.apache.lucene.index;
*/
import java.io.Closeable;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
+import java.util.*;
import java.util.Map.Entry;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
@@ -109,30 +102,30 @@ public class TestDocValuesIndexing exten
}
public void testIndexBytesNoDeletes() throws IOException {
- runTestIndexBytes(writerConfig(random.nextBoolean()), false);
+ runTestIndexBytes(writerConfig(random().nextBoolean()), false);
}
public void testIndexBytesDeletes() throws IOException {
- runTestIndexBytes(writerConfig(random.nextBoolean()), true);
+ runTestIndexBytes(writerConfig(random().nextBoolean()), true);
}
public void testIndexNumericsNoDeletes() throws IOException {
- runTestNumerics(writerConfig(random.nextBoolean()), false);
+ runTestNumerics(writerConfig(random().nextBoolean()), false);
}
public void testIndexNumericsDeletes() throws IOException {
- runTestNumerics(writerConfig(random.nextBoolean()), true);
+ runTestNumerics(writerConfig(random().nextBoolean()), true);
}
public void testAddIndexes() throws IOException {
int valuesPerIndex = 10;
List<Type> values = Arrays.asList(Type.values());
- Collections.shuffle(values, random);
+ Collections.shuffle(values, random());
Type first = values.get(0);
Type second = values.get(1);
// index first index
Directory d_1 = newDirectory();
- IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random.nextBoolean()));
+ IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random().nextBoolean()));
indexValues(w_1, valuesPerIndex, first, values, false, 7);
w_1.commit();
assertEquals(valuesPerIndex, w_1.maxDoc());
@@ -140,17 +133,17 @@ public class TestDocValuesIndexing exten
// index second index
Directory d_2 = newDirectory();
- IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random.nextBoolean()));
+ IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random().nextBoolean()));
indexValues(w_2, valuesPerIndex, second, values, false, 7);
w_2.commit();
assertEquals(valuesPerIndex, w_2.maxDoc());
_TestUtil.checkIndex(d_2);
Directory target = newDirectory();
- IndexWriter w = new IndexWriter(target, writerConfig(random.nextBoolean()));
+ IndexWriter w = new IndexWriter(target, writerConfig(random().nextBoolean()));
DirectoryReader r_1 = DirectoryReader.open(w_1, true);
DirectoryReader r_2 = DirectoryReader.open(w_2, true);
- if (random.nextBoolean()) {
+ if (random().nextBoolean()) {
w.addIndexes(d_1, d_2);
} else {
w.addIndexes(r_1, r_2);
@@ -238,8 +231,8 @@ public class TestDocValuesIndexing exten
private IndexWriterConfig writerConfig(boolean useCompoundFile) {
final IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random));
- cfg.setMergePolicy(newLogMergePolicy(random));
+ new MockAnalyzer(random()));
+ cfg.setMergePolicy(newLogMergePolicy(random()));
LogMergePolicy policy = new LogDocMergePolicy();
cfg.setMergePolicy(policy);
policy.setUseCompoundFile(useCompoundFile);
@@ -255,7 +248,7 @@ public class TestDocValuesIndexing exten
final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
// run in random order to test if fill works correctly during merges
- Collections.shuffle(numVariantList, random);
+ Collections.shuffle(numVariantList, random());
for (Type val : numVariantList) {
FixedBitSet deleted = indexValues(w, numValues, val, numVariantList,
withDeletions, 7);
@@ -331,7 +324,7 @@ public class TestDocValuesIndexing exten
IndexWriter w = new IndexWriter(d, cfg);
final List<Type> byteVariantList = new ArrayList<Type>(BYTES);
// run in random order to test if fill works correctly during merges
- Collections.shuffle(byteVariantList, random);
+ Collections.shuffle(byteVariantList, random());
final int numValues = 50 + atLeast(10);
for (Type byteIndexValue : byteVariantList) {
List<Closeable> closeables = new ArrayList<Closeable>();
@@ -414,11 +407,11 @@ public class TestDocValuesIndexing exten
public void testGetArrayNumerics() throws CorruptIndexException, IOException {
Directory d = newDirectory();
- IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+ IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, cfg);
final int numValues = 50 + atLeast(10);
final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
- Collections.shuffle(numVariantList, random);
+ Collections.shuffle(numVariantList, random());
for (Type val : numVariantList) {
indexValues(w, numValues, val, numVariantList,
false, 7);
@@ -502,7 +495,7 @@ public class TestDocValuesIndexing exten
public void testGetArrayBytes() throws CorruptIndexException, IOException {
Directory d = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random));
+ new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, cfg);
final int numValues = 50 + atLeast(10);
// only single byte fixed straight supports getArray()
@@ -542,7 +535,7 @@ public class TestDocValuesIndexing exten
@SuppressWarnings("fallthrough")
private Source getSource(DocValues values) throws IOException {
// getSource uses cache internally
- switch(random.nextInt(5)) {
+ switch(random().nextInt(5)) {
case 3:
return values.load();
case 2:
@@ -656,17 +649,17 @@ public class TestDocValuesIndexing exten
w.addDocument(doc);
if (i % 7 == 0) {
- if (withDeletions && random.nextBoolean()) {
- Type val = valueVarList.get(random.nextInt(1 + valueVarList
+ if (withDeletions && random().nextBoolean()) {
+ Type val = valueVarList.get(random().nextInt(1 + valueVarList
.indexOf(valueType)));
- final int randInt = val == valueType ? random.nextInt(1 + i) : random
+ final int randInt = val == valueType ? random().nextInt(1 + i) : random()
.nextInt(numValues);
w.deleteDocuments(new Term("id", val.name() + "_" + randInt));
if (val == valueType) {
deleted.set(randInt);
}
}
- if (random.nextInt(10) == 0) {
+ if (random().nextInt(10) == 0) {
w.commit();
}
}
@@ -674,7 +667,7 @@ public class TestDocValuesIndexing exten
w.commit();
// TODO test multi seg with deletions
- if (withDeletions || random.nextBoolean()) {
+ if (withDeletions || random().nextBoolean()) {
w.forceMerge(1, true);
}
return deleted;
@@ -682,7 +675,7 @@ public class TestDocValuesIndexing exten
public void testMultiValuedDocValuesField() throws Exception {
Directory d = newDirectory();
- RandomIndexWriter w = new RandomIndexWriter(random, d);
+ RandomIndexWriter w = new RandomIndexWriter(random(), d);
Document doc = new Document();
DocValuesField f = new DocValuesField("field", 17, Type.VAR_INTS);
// Index doc values are single-valued so we should not
@@ -709,7 +702,7 @@ public class TestDocValuesIndexing exten
public void testDifferentTypedDocValuesField() throws Exception {
Directory d = newDirectory();
- RandomIndexWriter w = new RandomIndexWriter(random, d);
+ RandomIndexWriter w = new RandomIndexWriter(random(), d);
Document doc = new Document();
// Index doc values are single-valued so we should not
// be able to add same field more than once:
@@ -740,17 +733,17 @@ public class TestDocValuesIndexing exten
boolean fixed = type == Type.BYTES_FIXED_SORTED;
final Directory d = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random));
+ new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, cfg);
int numDocs = atLeast(100);
BytesRefHash hash = new BytesRefHash();
Map<String, String> docToString = new HashMap<String, String>();
- int len = 1 + random.nextInt(50);
+ int len = 1 + random().nextInt(50);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(newField("id", "" + i, TextField.TYPE_STORED));
- String string =fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random,
- len) : _TestUtil.randomRealisticUnicodeString(random, 1, len);
+ String string =fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random(),
+ len) : _TestUtil.randomRealisticUnicodeString(random(), 1, len);
BytesRef br = new BytesRef(string);
doc.add(new DocValuesField("field", br, type));
hash.add(br);
@@ -777,8 +770,8 @@ public class TestDocValuesIndexing exten
Document doc = new Document();
String id = "" + i + numDocs;
doc.add(newField("id", id, TextField.TYPE_STORED));
- String string = fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random,
- len) : _TestUtil.randomRealisticUnicodeString(random, 1, len);
+ String string = fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random(),
+ len) : _TestUtil.randomRealisticUnicodeString(random(), 1, len);
BytesRef br = new BytesRef(string);
hash.add(br);
docToString.put(id, string);
@@ -826,6 +819,7 @@ public class TestDocValuesIndexing exten
}
public void testWithThreads() throws Exception {
+ Random random = random();
final int NUM_DOCS = atLeast(100);
final Directory dir = newDirectory();
final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
@@ -883,12 +877,13 @@ public class TestDocValuesIndexing exten
final DocValues.Source docIDToID = sr.docValues("id").getSource();
- final int NUM_THREADS = _TestUtil.nextInt(random, 1, 10);
+ final int NUM_THREADS = _TestUtil.nextInt(random(), 1, 10);
Thread[] threads = new Thread[NUM_THREADS];
for(int thread=0;thread<NUM_THREADS;thread++) {
threads[thread] = new Thread() {
@Override
public void run() {
+ Random random = random();
final DocValues.Source stringDVSource;
final DocValues.Source stringDVDirectSource;
try {
@@ -934,7 +929,7 @@ public class TestDocValuesIndexing exten
// LUCENE-3870
public void testLengthPrefixAcrossTwoPages() throws Exception {
Directory d = newDirectory();
- IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
byte[] bytes = new byte[32764];
BytesRef b = new BytesRef();
@@ -960,4 +955,4 @@ public class TestDocValuesIndexing exten
w.close();
d.close();
}
-}
\ No newline at end of file
+}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java Sun Apr 15 14:41:44 2012
@@ -38,7 +38,7 @@ public class TestDocsAndPositions extend
@Override
public void setUp() throws Exception {
super.setUp();
- fieldName = "field" + random.nextInt();
+ fieldName = "field" + random().nextInt();
}
/**
@@ -46,8 +46,8 @@ public class TestDocsAndPositions extend
*/
public void testPositionsSimple() throws IOException {
Directory directory = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, directory,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < 39; i++) {
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
@@ -72,7 +72,7 @@ public class TestDocsAndPositions extend
if (atomicReaderContext.reader().maxDoc() == 0) {
continue;
}
- final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader().maxDoc()));
+ final int advance = docsAndPosEnum.advance(random().nextInt(atomicReaderContext.reader().maxDoc()));
do {
String msg = "Advanced to: " + advance + " current doc: "
+ docsAndPosEnum.docID(); // TODO: + " usePayloads: " + usePayload;
@@ -104,11 +104,11 @@ public class TestDocsAndPositions extend
*/
public void testRandomPositions() throws IOException {
Directory dir = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
int numDocs = atLeast(47);
int max = 1051;
- int term = random.nextInt(max);
+ int term = random().nextInt(max);
Integer[][] positionsInDoc = new Integer[numDocs][];
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setOmitNorms(true);
@@ -118,7 +118,7 @@ public class TestDocsAndPositions extend
StringBuilder builder = new StringBuilder();
int num = atLeast(131);
for (int j = 0; j < num; j++) {
- int nextInt = random.nextInt(max);
+ int nextInt = random().nextInt(max);
builder.append(nextInt).append(" ");
if (nextInt == term) {
positions.add(Integer.valueOf(j));
@@ -148,10 +148,10 @@ public class TestDocsAndPositions extend
int initDoc = 0;
int maxDoc = atomicReaderContext.reader().maxDoc();
// initially advance or do next doc
- if (random.nextBoolean()) {
+ if (random().nextBoolean()) {
initDoc = docsAndPosEnum.nextDoc();
} else {
- initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
+ initDoc = docsAndPosEnum.advance(random().nextInt(maxDoc));
}
// now run through the scorer and check if all positions are there...
do {
@@ -163,8 +163,8 @@ public class TestDocsAndPositions extend
assertEquals(pos.length, docsAndPosEnum.freq());
// number of positions read should be random - don't read all of them
// always
- final int howMany = random.nextInt(20) == 0 ? pos.length
- - random.nextInt(pos.length) : pos.length;
+ final int howMany = random().nextInt(20) == 0 ? pos.length
+ - random().nextInt(pos.length) : pos.length;
for (int j = 0; j < howMany; j++) {
assertEquals("iteration: " + i + " initDoc: " + initDoc + " doc: "
+ docID + " base: " + atomicReaderContext.docBase
@@ -172,9 +172,9 @@ public class TestDocsAndPositions extend
+ usePayload*/, pos[j].intValue(), docsAndPosEnum.nextPosition());
}
- if (random.nextInt(10) == 0) { // once is a while advance
+    if (random().nextInt(10) == 0) { // once in a while advance
docsAndPosEnum
- .advance(docID + 1 + random.nextInt((maxDoc - docID)));
+ .advance(docID + 1 + random().nextInt((maxDoc - docID)));
}
} while (docsAndPosEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -187,11 +187,11 @@ public class TestDocsAndPositions extend
public void testRandomDocs() throws IOException {
Directory dir = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
int numDocs = atLeast(49);
int max = 15678;
- int term = random.nextInt(max);
+ int term = random().nextInt(max);
int[] freqInDoc = new int[numDocs];
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setOmitNorms(true);
@@ -199,7 +199,7 @@ public class TestDocsAndPositions extend
Document doc = new Document();
StringBuilder builder = new StringBuilder();
for (int j = 0; j < 199; j++) {
- int nextInt = random.nextInt(max);
+ int nextInt = random().nextInt(max);
builder.append(nextInt).append(' ');
if (nextInt == term) {
freqInDoc[i]++;
@@ -219,7 +219,7 @@ public class TestDocsAndPositions extend
AtomicReaderContext[] leaves = topReaderContext.leaves();
for (AtomicReaderContext context : leaves) {
int maxDoc = context.reader().maxDoc();
- DocsEnum docsEnum = _TestUtil.docs(random, context.reader(), fieldName, bytes, null, null, true);
+ DocsEnum docsEnum = _TestUtil.docs(random(), context.reader(), fieldName, bytes, null, null, true);
if (findNext(freqInDoc, context.docBase, context.docBase + maxDoc) == Integer.MAX_VALUE) {
assertNull(docsEnum);
continue;
@@ -230,7 +230,7 @@ public class TestDocsAndPositions extend
if (freqInDoc[context.docBase + j] != 0) {
assertEquals(j, docsEnum.docID());
assertEquals(docsEnum.freq(), freqInDoc[context.docBase +j]);
- if (i % 2 == 0 && random.nextInt(10) == 0) {
+ if (i % 2 == 0 && random().nextInt(10) == 0) {
int next = findNext(freqInDoc, context.docBase+j+1, context.docBase + maxDoc) - context.docBase;
int advancedTo = docsEnum.advance(next);
if (next >= maxDoc) {
@@ -267,8 +267,8 @@ public class TestDocsAndPositions extend
*/
public void testLargeNumberOfPositions() throws IOException {
Directory dir = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
int howMany = 1000;
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setOmitNorms(true);
@@ -304,10 +304,10 @@ public class TestDocsAndPositions extend
int initDoc = 0;
int maxDoc = atomicReaderContext.reader().maxDoc();
// initially advance or do next doc
- if (random.nextBoolean()) {
+ if (random().nextBoolean()) {
initDoc = docsAndPosEnum.nextDoc();
} else {
- initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
+ initDoc = docsAndPosEnum.advance(random().nextInt(maxDoc));
}
String msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
assertEquals(howMany / 2, docsAndPosEnum.freq());
@@ -324,13 +324,13 @@ public class TestDocsAndPositions extend
public void testDocsEnumStart() throws Exception {
Directory dir = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, dir);
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newField("foo", "bar", StringField.TYPE_UNSTORED));
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
AtomicReader r = getOnlySegmentReader(reader);
- DocsEnum disi = _TestUtil.docs(random, r, "foo", new BytesRef("bar"), null, null, false);
+ DocsEnum disi = _TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, null, false);
int docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -338,7 +338,7 @@ public class TestDocsAndPositions extend
// now reuse and check again
TermsEnum te = r.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar"), true));
- disi = _TestUtil.docs(random, te, null, disi, false);
+ disi = _TestUtil.docs(random(), te, null, disi, false);
docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -349,7 +349,7 @@ public class TestDocsAndPositions extend
public void testDocsAndPositionsEnumStart() throws Exception {
Directory dir = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random, dir);
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newField("foo", "bar", TextField.TYPE_UNSTORED));
writer.addDocument(doc);
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java Sun Apr 15 14:41:44 2012
@@ -59,13 +59,13 @@ public class TestDocumentWriter extends
public void testAddDocument() throws Exception {
Document testDoc = new Document();
DocHelper.setupDoc(testDoc);
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(testDoc);
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
//After adding the document, we should be able to read it back in
- SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+ SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
assertTrue(reader != null);
Document doc = reader.document(0);
assertTrue(doc != null);
@@ -126,7 +126,7 @@ public class TestDocumentWriter extends
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
- SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+ SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader),
"repeated", new BytesRef("repeated"), false);
@@ -198,7 +198,7 @@ public class TestDocumentWriter extends
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
- SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+ SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, reader.getLiveDocs(), "f1", new BytesRef("a"), false);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -216,7 +216,7 @@ public class TestDocumentWriter extends
public void testPreAnalyzedField() throws IOException {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new TextField("preanalyzed", new TokenStream() {
@@ -242,7 +242,7 @@ public class TestDocumentWriter extends
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
- SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+ SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
DocsAndPositionsEnum termPositions = reader.termPositionsEnum(reader.getLiveDocs(), "preanalyzed", new BytesRef("term1"), false);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -280,7 +280,7 @@ public class TestDocumentWriter extends
doc.add(newField("f2", "v2", StringField.TYPE_STORED));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(doc);
writer.close();
@@ -320,7 +320,7 @@ public class TestDocumentWriter extends
doc.add(newField("f2", "v2", customType2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(doc);
writer.forceMerge(1); // be sure to have a single segment
writer.close();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java Sun Apr 15 14:41:44 2012
@@ -36,10 +36,10 @@ public class TestDocumentsWriterDeleteQu
public void testUpdateDelteSlices() {
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
- final int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
+ final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
Integer[] ids = new Integer[size];
for (int i = 0; i < ids.length; i++) {
- ids[i] = random.nextInt();
+ ids[i] = random().nextInt();
}
DeleteSlice slice1 = queue.newSlice();
DeleteSlice slice2 = queue.newSlice();
@@ -54,14 +54,14 @@ public class TestDocumentsWriterDeleteQu
Term[] term = new Term[] {new Term("id", i.toString())};
uniqueValues.add(term[0]);
queue.addDelete(term);
- if (random.nextInt(20) == 0 || j == ids.length - 1) {
+ if (random().nextInt(20) == 0 || j == ids.length - 1) {
queue.updateSlice(slice1);
assertTrue(slice1.isTailItem(term));
slice1.apply(bd1, j);
assertAllBetween(last1, j, bd1, ids);
last1 = j + 1;
}
- if (random.nextInt(10) == 5 || j == ids.length - 1) {
+ if (random().nextInt(10) == 5 || j == ids.length - 1) {
queue.updateSlice(slice2);
assertTrue(slice2.isTailItem(term));
slice2.apply(bd2, j);
@@ -96,12 +96,12 @@ public class TestDocumentsWriterDeleteQu
assertFalse(queue.anyChanges());
queue.clear();
assertFalse(queue.anyChanges());
- final int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
+ final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
int termsSinceFreeze = 0;
int queriesSinceFreeze = 0;
for (int i = 0; i < size; i++) {
Term term = new Term("id", "" + i);
- if (random.nextInt(10) == 0) {
+ if (random().nextInt(10) == 0) {
queue.addDelete(new TermQuery(term));
queriesSinceFreeze++;
} else {
@@ -109,7 +109,7 @@ public class TestDocumentsWriterDeleteQu
termsSinceFreeze++;
}
assertTrue(queue.anyChanges());
- if (random.nextInt(10) == 0) {
+ if (random().nextInt(10) == 0) {
queue.clear();
queue.tryApplyGlobalSlice();
assertFalse(queue.anyChanges());
@@ -120,12 +120,12 @@ public class TestDocumentsWriterDeleteQu
public void testAnyChanges() {
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
- final int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
+ final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
int termsSinceFreeze = 0;
int queriesSinceFreeze = 0;
for (int i = 0; i < size; i++) {
Term term = new Term("id", "" + i);
- if (random.nextInt(10) == 0) {
+ if (random().nextInt(10) == 0) {
queue.addDelete(new TermQuery(term));
queriesSinceFreeze++;
} else {
@@ -133,7 +133,7 @@ public class TestDocumentsWriterDeleteQu
termsSinceFreeze++;
}
assertTrue(queue.anyChanges());
- if (random.nextInt(5) == 0) {
+ if (random().nextInt(5) == 0) {
FrozenBufferedDeletes freezeGlobalBuffer = queue
.freezeGlobalBuffer(null);
assertEquals(termsSinceFreeze, freezeGlobalBuffer.termCount);
@@ -174,15 +174,15 @@ public class TestDocumentsWriterDeleteQu
public void testStressDeleteQueue() throws InterruptedException {
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
Set<Term> uniqueValues = new HashSet<Term>();
- final int size = 10000 + random.nextInt(500) * RANDOM_MULTIPLIER;
+ final int size = 10000 + random().nextInt(500) * RANDOM_MULTIPLIER;
Integer[] ids = new Integer[size];
for (int i = 0; i < ids.length; i++) {
- ids[i] = random.nextInt();
+ ids[i] = random().nextInt();
uniqueValues.add(new Term("id", ids[i].toString()));
}
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger index = new AtomicInteger(0);
- final int numThreads = 2 + random.nextInt(5);
+ final int numThreads = 2 + random().nextInt(5);
UpdateThread[] threads = new UpdateThread[numThreads];
for (int i = 0; i < threads.length; i++) {
threads[i] = new UpdateThread(queue, index, ids, latch);
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java Sun Apr 15 14:41:44 2012
@@ -68,11 +68,11 @@ public class TestDuelingCodecs extends L
// so this would make assertEquals complicated.
leftCodec = Codec.forName("SimpleText");
- rightCodec = new RandomCodec(random, false);
+ rightCodec = new RandomCodec(random(), false);
leftDir = newDirectory();
rightDir = newDirectory();
- long seed = random.nextLong();
+ long seed = random().nextLong();
// must use same seed because of random payloads, etc
Analyzer leftAnalyzer = new MockAnalyzer(new Random(seed));
@@ -212,7 +212,7 @@ public class TestDuelingCodecs extends L
if (deep) {
int numIntersections = atLeast(3);
for (int i = 0; i < numIntersections; i++) {
- String re = AutomatonTestUtil.randomRegexp(random);
+ String re = AutomatonTestUtil.randomRegexp(random());
CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton());
if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) {
// TODO: test start term too
@@ -249,7 +249,7 @@ public class TestDuelingCodecs extends L
*/
public void assertTermsEnum(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum, boolean deep) throws Exception {
BytesRef term;
- Bits randomBits = new RandomBits(leftReader.maxDoc(), random.nextDouble(), random);
+ Bits randomBits = new RandomBits(leftReader.maxDoc(), random().nextDouble(), random());
DocsAndPositionsEnum leftPositions = null;
DocsAndPositionsEnum rightPositions = null;
DocsEnum leftDocs = null;
@@ -383,13 +383,13 @@ public class TestDuelingCodecs extends L
int skipInterval = 16;
while (true) {
- if (random.nextBoolean()) {
+ if (random().nextBoolean()) {
// nextDoc()
docid = leftDocs.nextDoc();
assertEquals(info, docid, rightDocs.nextDoc());
} else {
// advance()
- int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random.nextGaussian() * averageGap));
+ int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random().nextGaussian() * averageGap));
docid = leftDocs.advance(skip);
assertEquals(info, docid, rightDocs.advance(skip));
}
@@ -418,13 +418,13 @@ public class TestDuelingCodecs extends L
int skipInterval = 16;
while (true) {
- if (random.nextBoolean()) {
+ if (random().nextBoolean()) {
// nextDoc()
docid = leftDocs.nextDoc();
assertEquals(info, docid, rightDocs.nextDoc());
} else {
// advance()
- int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random.nextGaussian() * averageGap));
+ int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random().nextGaussian() * averageGap));
docid = leftDocs.advance(skip);
assertEquals(info, docid, rightDocs.advance(skip));
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java Sun Apr 15 14:41:44 2012
@@ -52,7 +52,7 @@ public class TestFieldInfos extends Luce
assertTrue(fieldInfos.size() == DocHelper.all.size()); //this is all b/c we are using the no-arg constructor
- IndexOutput output = dir.createOutput(filename, newIOContext(random));
+ IndexOutput output = dir.createOutput(filename, newIOContext(random()));
assertTrue(output != null);
//Use a RAMOutputStream
@@ -120,34 +120,34 @@ public class TestFieldInfos extends Luce
}
try {
- readOnly.addOrUpdate("bogus", random.nextBoolean());
+ readOnly.addOrUpdate("bogus", random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
- readOnly.addOrUpdate("bogus", random.nextBoolean(), random.nextBoolean());
+ readOnly.addOrUpdate("bogus", random().nextBoolean(), random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
- readOnly.addOrUpdate("bogus", random.nextBoolean(), random.nextBoolean(),
- random.nextBoolean());
+ readOnly.addOrUpdate("bogus", random().nextBoolean(), random().nextBoolean(),
+ random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
- readOnly.addOrUpdate("bogus", random.nextBoolean(), random.nextBoolean(),
- random.nextBoolean(),
- random.nextBoolean(), random.nextBoolean() ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, null, null);
+ readOnly.addOrUpdate("bogus", random().nextBoolean(), random().nextBoolean(),
+ random().nextBoolean(),
+ random().nextBoolean(), random().nextBoolean() ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, null, null);
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
- readOnly.addOrUpdate(Arrays.asList("a", "b", "c"), random.nextBoolean());
+ readOnly.addOrUpdate(Arrays.asList("a", "b", "c"), random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java Sun Apr 15 14:41:44 2012
@@ -57,7 +57,7 @@ public class TestFieldsReader extends Lu
DocHelper.setupDoc(testDoc);
_TestUtil.add(testDoc, fieldInfos);
dir = newDirectory();
- IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
+ IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(testDoc);
@@ -195,7 +195,7 @@ public class TestFieldsReader extends Lu
try {
Directory dir = new FaultyFSDirectory(indexDir);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
+ TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
for(int i=0;i<2;i++)
writer.addDocument(testDoc);
writer.forceMerge(1);
@@ -232,7 +232,7 @@ public class TestFieldsReader extends Lu
public void testNumericField() throws Exception {
Directory dir = newDirectory();
- RandomIndexWriter w = new RandomIndexWriter(random, dir);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int numDocs = atLeast(500);
final Number[] answers = new Number[numDocs];
final NumericType[] typeAnswers = new NumericType[numDocs];
@@ -242,16 +242,16 @@ public class TestFieldsReader extends Lu
final Field sf;
final Number answer;
final NumericType typeAnswer;
- if (random.nextBoolean()) {
+ if (random().nextBoolean()) {
// float/double
- if (random.nextBoolean()) {
- final float f = random.nextFloat();
+ if (random().nextBoolean()) {
+ final float f = random().nextFloat();
answer = Float.valueOf(f);
nf = new FloatField("nf", f);
sf = new StoredField("nf", f);
typeAnswer = NumericType.FLOAT;
} else {
- final double d = random.nextDouble();
+ final double d = random().nextDouble();
answer = Double.valueOf(d);
nf = new DoubleField("nf", d);
sf = new StoredField("nf", d);
@@ -259,14 +259,14 @@ public class TestFieldsReader extends Lu
}
} else {
// int/long
- if (random.nextBoolean()) {
- final int i = random.nextInt();
+ if (random().nextBoolean()) {
+ final int i = random().nextInt();
answer = Integer.valueOf(i);
nf = new IntField("nf", i);
sf = new StoredField("nf", i);
typeAnswer = NumericType.INT;
} else {
- final long l = random.nextLong();
+ final long l = random().nextLong();
answer = Long.valueOf(l);
nf = new LongField("nf", l);
sf = new StoredField("nf", l);
@@ -302,7 +302,7 @@ public class TestFieldsReader extends Lu
public void testIndexedBit() throws Exception {
Directory dir = newDirectory();
- RandomIndexWriter w = new RandomIndexWriter(random, dir);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir);
Document doc = new Document();
FieldType onlyStored = new FieldType();
onlyStored.setStored(true);
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFilterAtomicReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFilterAtomicReader.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFilterAtomicReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFilterAtomicReader.java Sun Apr 15 14:41:44 2012
@@ -129,7 +129,7 @@ public class TestFilterAtomicReader exte
public void testFilterIndexReader() throws Exception {
Directory directory = newDirectory();
- IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document d1 = new Document();
d1.add(newField("default","one two", TextField.TYPE_STORED));
@@ -150,7 +150,7 @@ public class TestFilterAtomicReader exte
// We mess with the postings so this can fail:
((MockDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
- writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader reader = new TestReader(IndexReader.open(directory));
writer.addIndexes(reader);
writer.close();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlex.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlex.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlex.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlex.java Sun Apr 15 14:41:44 2012
@@ -33,7 +33,7 @@ public class TestFlex extends LuceneTest
IndexWriter w = new IndexWriter(
d,
- new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random)).
+ new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random())).
setMaxBufferedDocs(7)
);
@@ -65,7 +65,7 @@ public class TestFlex extends LuceneTest
public void testTermOrd() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random)).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
+ new MockAnalyzer(random())).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
Document doc = new Document();
doc.add(newField("f", "a b c", TextField.TYPE_UNSTORED));
w.addDocument(doc);
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java Sun Apr 15 14:41:44 2012
@@ -39,7 +39,7 @@ public class TestFlushByRamOrCountsPolic
@BeforeClass
public static void beforeClass() throws Exception {
- lineDocFile = new LineFileDocs(random, defaultCodecSupportsDocValues());
+ lineDocFile = new LineFileDocs(random(), defaultCodecSupportsDocValues());
}
@AfterClass
@@ -51,14 +51,14 @@ public class TestFlushByRamOrCountsPolic
public void testFlushByRam() throws CorruptIndexException,
LockObtainFailedException, IOException, InterruptedException {
final double ramBuffer = (TEST_NIGHTLY ? 1 : 10) + atLeast(2)
- + random.nextDouble();
- runFlushByRam(1 + random.nextInt(TEST_NIGHTLY ? 5 : 1), ramBuffer, false);
+ + random().nextDouble();
+ runFlushByRam(1 + random().nextInt(TEST_NIGHTLY ? 5 : 1), ramBuffer, false);
}
public void testFlushByRamLargeBuffer() throws CorruptIndexException,
LockObtainFailedException, IOException, InterruptedException {
// with a 256 mb ram buffer we should never stall
- runFlushByRam(1 + random.nextInt(TEST_NIGHTLY ? 5 : 1), 256.d, true);
+ runFlushByRam(1 + random().nextInt(TEST_NIGHTLY ? 5 : 1), 256.d, true);
}
protected void runFlushByRam(int numThreads, double maxRamMB,
@@ -69,7 +69,7 @@ public class TestFlushByRamOrCountsPolic
Directory dir = newDirectory();
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random)).setFlushPolicy(flushPolicy);
+ new MockAnalyzer(random())).setFlushPolicy(flushPolicy);
final int numDWPT = 1 + atLeast(2);
DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
numDWPT);
@@ -125,7 +125,7 @@ public class TestFlushByRamOrCountsPolic
Directory dir = newDirectory();
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random)).setFlushPolicy(flushPolicy);
+ new MockAnalyzer(random())).setFlushPolicy(flushPolicy);
final int numDWPT = 1 + atLeast(2);
DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
@@ -168,16 +168,16 @@ public class TestFlushByRamOrCountsPolic
}
public void testRandom() throws IOException, InterruptedException {
- final int numThreads = 1 + random.nextInt(8);
+ final int numThreads = 1 + random().nextInt(8);
final int numDocumentsToIndex = 50 + atLeast(70);
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random));
+ new MockAnalyzer(random()));
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
iwc.setFlushPolicy(flushPolicy);
- final int numDWPT = 1 + random.nextInt(8);
+ final int numDWPT = 1 + random().nextInt(8);
DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
numDWPT);
iwc.setIndexerThreadPool(threadPool);
@@ -230,15 +230,15 @@ public class TestFlushByRamOrCountsPolic
public void testStallControl() throws InterruptedException,
CorruptIndexException, LockObtainFailedException, IOException {
- int[] numThreads = new int[] { 4 + random.nextInt(8), 1 };
- final int numDocumentsToIndex = 50 + random.nextInt(50);
+ int[] numThreads = new int[] { 4 + random().nextInt(8), 1 };
+ final int numDocumentsToIndex = 50 + random().nextInt(50);
for (int i = 0; i < numThreads.length; i++) {
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
MockDirectoryWrapper dir = newDirectory();
// mock a very slow harddisk sometimes here so that flushing is very slow
dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
- new MockAnalyzer(random));
+ new MockAnalyzer(random()));
iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
iwc.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
FlushPolicy flushPolicy = new FlushByRamOrCountsPolicy();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForTooMuchCloning.java Sun Apr 15 14:41:44 2012
@@ -39,13 +39,13 @@ public class TestForTooMuchCloning exten
final MockDirectoryWrapper dir = newDirectory();
final TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setMaxMergeAtOnce(2);
- final RandomIndexWriter w = new RandomIndexWriter(random, dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(tmp));
+ final RandomIndexWriter w = new RandomIndexWriter(random(), dir,
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(tmp));
final int numDocs = 20;
for(int docs=0;docs<numDocs;docs++) {
StringBuilder sb = new StringBuilder();
for(int terms=0;terms<100;terms++) {
- sb.append(_TestUtil.randomRealisticUnicodeString(random));
+ sb.append(_TestUtil.randomRealisticUnicodeString(random()));
sb.append(' ');
}
final Document doc = new Document();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java Sun Apr 15 14:41:44 2012
@@ -54,12 +54,12 @@ public class TestForceMergeForever exten
public void test() throws Exception {
final Directory d = newDirectory();
- final MyIndexWriter w = new MyIndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+ final MyIndexWriter w = new MyIndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// Try to make an index that requires merging:
- w.getConfig().setMaxBufferedDocs(_TestUtil.nextInt(random, 2, 11));
+ w.getConfig().setMaxBufferedDocs(_TestUtil.nextInt(random(), 2, 11));
final int numStartDocs = atLeast(20);
- final LineFileDocs docs = new LineFileDocs(random, defaultCodecSupportsDocValues());
+ final LineFileDocs docs = new LineFileDocs(random(), defaultCodecSupportsDocValues());
for(int docIDX=0;docIDX<numStartDocs;docIDX++) {
w.addDocument(docs.nextDoc());
}
@@ -83,7 +83,7 @@ public class TestForceMergeForever exten
public void run() {
try {
while (!doStop.get()) {
- w.updateDocument(new Term("docid", "" + random.nextInt(numStartDocs)),
+ w.updateDocument(new Term("docid", "" + random().nextInt(numStartDocs)),
docs.nextDoc());
// Force deletes to apply
w.getReader().close();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Sun Apr 15 14:41:44 2012
@@ -52,7 +52,7 @@ public class TestIndexFileDeleter extend
IndexWriter writer = new IndexWriter(
dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(mergePolicy)
);
@@ -70,7 +70,7 @@ public class TestIndexFileDeleter extend
// Delete one doc so we get a .del file:
writer = new IndexWriter(
dir,
- newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+ newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
);
Term searchTerm = new Term("id", "7");
@@ -123,7 +123,7 @@ public class TestIndexFileDeleter extend
// Open & close a writer: it should delete the above 4
// files and nothing more:
- writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
+ writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.close();
String[] files2 = dir.listAll();
@@ -179,8 +179,8 @@ public class TestIndexFileDeleter extend
}
public void copyFile(Directory dir, String src, String dest) throws IOException {
- IndexInput in = dir.openInput(src, newIOContext(random));
- IndexOutput out = dir.createOutput(dest, newIOContext(random));
+ IndexInput in = dir.openInput(src, newIOContext(random()));
+ IndexOutput out = dir.createOutput(dest, newIOContext(random()));
byte[] b = new byte[1024];
long remainder = in.length();
while(remainder > 0) {
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexInput.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexInput.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexInput.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexInput.java Sun Apr 15 14:41:44 2012
@@ -29,6 +29,7 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
+import java.util.Random;
public class TestIndexInput extends LuceneTestCase {
@@ -85,6 +86,7 @@ public class TestIndexInput extends Luce
@BeforeClass
public static void beforeClass() throws IOException {
+ Random random = random();
INTS = new int[COUNT];
LONGS = new long[COUNT];
RANDOM_TEST_BYTES = new byte[COUNT * (5 + 4 + 9 + 8)];
@@ -177,6 +179,7 @@ public class TestIndexInput extends Luce
// this test checks the raw IndexInput methods as it uses RAMIndexInput which extends IndexInput directly
public void testRawIndexInputRead() throws IOException {
+ Random random = random();
final RAMDirectory dir = new RAMDirectory();
IndexOutput os = dir.createOutput("foo", newIOContext(random));
os.writeBytes(READ_TEST_BYTES, READ_TEST_BYTES.length);