You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by us...@apache.org on 2011/05/09 18:36:52 UTC
svn commit: r1101103 -
/lucene/dev/branches/lucene_solr_3_1/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
Author: uschindler
Date: Mon May 9 16:36:52 2011
New Revision: 1101103
URL: http://svn.apache.org/viewvc?rev=1101103&view=rev
Log:
Add code to build 3.1 compressed test indexes for upgrade test in 3.2+ and trunk; cleanup code
Modified:
lucene/dev/branches/lucene_solr_3_1/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
Modified: lucene/dev/branches/lucene_solr_3_1/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene_solr_3_1/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=1101103&r1=1101102&r2=1101103&view=diff
==============================================================================
--- lucene/dev/branches/lucene_solr_3_1/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/dev/branches/lucene_solr_3_1/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Mon May 9 16:36:52 2011
@@ -61,26 +61,27 @@ public class TestBackwardsCompatibility
// oldNames array.
/*
- public void testCreatePreLocklessCFS() throws IOException {
- createIndex(random, "index.cfs", true);
+ public void testCreateCFS() throws IOException {
+ createIndex("index.cfs", true, false);
}
- public void testCreatePreLocklessNoCFS() throws IOException {
- createIndex(random, "index.nocfs", false);
+ public void testCreateNoCFS() throws IOException {
+ createIndex("index.nocfs", false, false);
}
*/
-
+
/*
- public void testCreateCFS() throws IOException {
- String dirName = "testindex.cfs";
- File indexDir = createIndex(random, dirName, true);
- //_TestUtil.rmDir(indexDir);
+ // These are only needed for the special upgrade test to verify
+ // that also optimized indexes are correctly upgraded by IndexUpgrader.
+ // You don't need them to be built for non-3.1 (the test is happy with just one
+ // "old" segment format; the version is unimportant):
+
+ public void testCreateOptimizedCFS() throws IOException {
+ createIndex("index.optimized.cfs", true, true);
}
- public void testCreateNoCFS() throws IOException {
- String dirName = "testindex.nocfs";
- File indexDir = createIndex(random, dirName, false);
- //_TestUtil.rmDir(indexDir);
+ public void testCreateOptimizedNoCFS() throws IOException {
+ createIndex("index.optimized.nocfs", false, true);
}
*/
@@ -473,7 +474,7 @@ public class TestBackwardsCompatibility
dir.close();
}
- public File createIndex(Random random, String dirName, boolean doCFS) throws IOException {
+ public File createIndex(String dirName, boolean doCFS, boolean optimized) throws IOException {
// we use a real directory name that is not cleaned up, because this method is only used to create backwards indexes:
File indexDir = new File(LuceneTestCase.TEMP_DIR, dirName);
_TestUtil.rmDir(indexDir);
@@ -489,24 +490,30 @@ public class TestBackwardsCompatibility
addDoc(writer, i);
}
assertEquals("wrong doc count", 35, writer.maxDoc());
+ if (optimized) {
+ writer.optimize();
+ }
writer.close();
- // open fresh writer so we get no prx file in the added segment
- conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10);
- ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
- writer = new IndexWriter(dir, conf);
- addNoProxDoc(writer);
- writer.close();
+ if (!optimized) {
+ // open fresh writer so we get no prx file in the added segment
+ conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10);
+ ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
+ writer = new IndexWriter(dir, conf);
+ addNoProxDoc(writer);
+ writer.close();
- // Delete one doc so we get a .del file:
- IndexReader reader = IndexReader.open(dir, false);
- Term searchTerm = new Term("id", "7");
- int delCount = reader.deleteDocuments(searchTerm);
- assertEquals("didn't delete the right number of documents", 1, delCount);
+ // Delete one doc so we get a .del file:
+ IndexReader reader = IndexReader.open(dir, false);
+ Term searchTerm = new Term("id", "7");
+ int delCount = reader.deleteDocuments(searchTerm);
+ assertEquals("didn't delete the right number of documents", 1, delCount);
- // Set one norm so we get a .s0 file:
- reader.setNorm(21, "content", (float) 1.5);
- reader.close();
+ // Set one norm so we get a .s0 file:
+ reader.setNorm(21, "content", (float) 1.5);
+ reader.close();
+ }
+
dir.close();
return indexDir;