Posted to commits@lucene.apache.org by mi...@apache.org on 2012/07/19 17:59:32 UTC

svn commit: r1363400 [30/31] - in /lucene/dev/branches/pforcodec_3892: ./ dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/.idea/copyright/ dev-tools/idea/.idea/libraries/ dev-tools/idea/lucene/ dev-tools/maven/ dev-tools/maven/lucene...

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java Thu Jul 19 15:58:54 2012
@@ -17,50 +17,34 @@
 package org.apache.solr.search;
 
 
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.*;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.noggit.ObjectBuilder;
-import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.update.UpdateHandler;
-import org.apache.solr.update.UpdateLog;
-import org.apache.solr.update.VersionInfo;
 import org.apache.solr.util.TestHarness;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
 
-import static  org.apache.solr.core.SolrCore.verbose;
+import static org.apache.solr.core.SolrCore.verbose;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
-import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
 
-public class TestRealTimeGet extends SolrTestCaseJ4 {
-
-  // means we've seen the leader and have version info (i.e. we are a non-leader replica)
-  private static String FROM_LEADER = DistribPhase.FROMLEADER.toString(); 
+public class TestRealTimeGet extends TestRTGBase {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig-tlog.xml","schema15.xml");
   }
 
+
   @Test
   public void testGetRealtime() throws Exception {
     clearIndex();
@@ -210,7 +194,7 @@ public class TestRealTimeGet extends Sol
     assertU(commit());
 
     // make sure a reordered add doesn't take effect.
-    updateJ(jsonAdd(sdoc("id","1", "_version_",Long.toString(version - 1))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+    long version2 = deleteByQueryAndGetVersion("id:2", null);
 
     // test that it's still deleted
     assertJQ(req("qt","/get","id","1")
@@ -218,9 +202,33 @@ public class TestRealTimeGet extends Sol
     );
 
     version = addAndGetVersion(sdoc("id","2"), null);
-    long version2 = deleteByQueryAndGetVersion("id:2", null);
+    version2 = deleteByQueryAndGetVersion("id:2", null);
     assertTrue(Math.abs(version2) > version );
-    
+
+    // test that it's deleted
+    assertJQ(req("qt","/get","id","2")
+        ,"=={'doc':null}");
+
+
+    version2 = Math.abs(version2) + 1000;
+    updateJ(jsonAdd(sdoc("id","3", "_version_",Long.toString(version2+100))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+    updateJ(jsonAdd(sdoc("id","4", "_version_",Long.toString(version2+200))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+
+    // this should only affect id:3 so far
+    deleteByQueryAndGetVersion("id:(3 4 5 6)", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",Long.toString(-(version2+150))) );
+
+    assertJQ(req("qt","/get","id","3"),"=={'doc':null}");
+    assertJQ(req("qt","/get","id","4", "fl","id"),"=={'doc':{'id':'4'}}");
+
+    updateJ(jsonAdd(sdoc("id","5", "_version_",Long.toString(version2+201))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+    updateJ(jsonAdd(sdoc("id","6", "_version_",Long.toString(version2+101))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+
+    // the DBQ should also have caused id:6 to be removed
+    assertJQ(req("qt","/get","id","5", "fl","id"),"=={'doc':{'id':'5'}}");
+    assertJQ(req("qt","/get","id","6"),"=={'doc':null}");
+
+    assertU(commit());
+
   }
 
   @Test
@@ -390,59 +398,6 @@ public class TestRealTimeGet extends Sol
     ***/
 
 
-  final ConcurrentHashMap<Integer,DocInfo> model = new ConcurrentHashMap<Integer,DocInfo>();
-  Map<Integer,DocInfo> committedModel = new HashMap<Integer,DocInfo>();
-  long snapshotCount;
-  long committedModelClock;
-  volatile int lastId;
-  final String field = "val_l";
-  Object[] syncArr;
-
-  private void initModel(int ndocs) {
-    snapshotCount = 0;
-    committedModelClock = 0;
-    lastId = 0;
-
-    syncArr = new Object[ndocs];
-
-    for (int i=0; i<ndocs; i++) {
-      model.put(i, new DocInfo(0, -1L));
-      syncArr[i] = new Object();
-    }
-    committedModel.putAll(model);
-  }
-
-
-  static class DocInfo {
-    long version;
-    long val;
-
-    public DocInfo(long version, long val) {
-      this.version = version;
-      this.val = val;
-    }
-
-    public String toString() {
-      return "{version="+version+",val="+val+"\"";
-    }
-  }
-
-  private long badVersion(Random rand, long version) {
-    if (version > 0) {
-      // return a random number not equal to version
-      for (;;) {
-        long badVersion = rand.nextInt();
-        if (badVersion != version && badVersion != 0) return badVersion;
-      }
-    }
-
-    // if the version does not exist, then we can only specify a positive version
-    for (;;) {
-      long badVersion = rand.nextInt() & 0x7fffffff;  // mask off sign bit
-      if (badVersion != 0) return badVersion;
-    }
-  }
-
   @Test
   public void testStressGetRealtime() throws Exception {
     clearIndex();
@@ -731,1184 +686,4 @@ public class TestRealTimeGet extends Sol
   }
 
 
-  // This version doesn't synchronize on id to tell what update won, but instead uses versions
-  @Test
-  public void testStressGetRealtimeVersions() throws Exception {
-    clearIndex();
-    assertU(commit());
-
-    final int commitPercent = 5 + random().nextInt(20);
-    final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft
-    final int deletePercent = 4+random().nextInt(25);
-    final int deleteByQueryPercent = 1 + random().nextInt(5);
-    final int optimisticPercent = 1+random().nextInt(50);    // percent chance that an update uses optimistic locking
-    final int optimisticCorrectPercent = 25+random().nextInt(70);    // percent chance that a specified version will be correct
-    final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
-    int nWriteThreads = 5 + random().nextInt(25);
-
-    final int maxConcurrentCommits = nWriteThreads;   // number of committers at a time... it should be <= maxWarmingSearchers
-
-        // query variables
-    final int percentRealtimeQuery = 75;
-    final AtomicLong operations = new AtomicLong(50000);  // number of query operations to perform in total
-    int nReadThreads = 5 + random().nextInt(25);
-
-
-
-    initModel(ndocs);
-
-    final AtomicInteger numCommitting = new AtomicInteger();
-
-    List<Thread> threads = new ArrayList<Thread>();
-
-    for (int i=0; i<nWriteThreads; i++) {
-      Thread thread = new Thread("WRITER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-          while (operations.get() > 0) {
-            int oper = rand.nextInt(100);
-
-            if (oper < commitPercent) {
-              if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
-                Map<Integer,DocInfo> newCommittedModel;
-                long version;
-
-                synchronized(TestRealTimeGet.this) {
-                  newCommittedModel = new HashMap<Integer,DocInfo>(model);  // take a snapshot
-                  version = snapshotCount++;
-                }
-
-                if (rand.nextInt(100) < softCommitPercent) {
-                  verbose("softCommit start");
-                  assertU(TestHarness.commit("softCommit","true"));
-                  verbose("softCommit end");
-                } else {
-                  verbose("hardCommit start");
-                  assertU(commit());
-                  verbose("hardCommit end");
-                }
-
-                synchronized(TestRealTimeGet.this) {
-                  // install this model snapshot only if it's newer than the current one
-                  if (version >= committedModelClock) {
-                    if (VERBOSE) {
-                      verbose("installing new committedModel version="+committedModelClock);
-                    }
-                    committedModel = newCommittedModel;
-                    committedModelClock = version;
-                  }
-                }
-              }
-              numCommitting.decrementAndGet();
-              continue;
-            }
-
-
-            int id = rand.nextInt(ndocs);
-            Object sync = syncArr[id];
-
-            // set the lastId before we actually change it sometimes to try and
-            // uncover more race conditions between writing and reading
-            boolean before = rand.nextBoolean();
-            if (before) {
-              lastId = id;
-            }
-
-            // We can't concurrently update the same document and retain our invariants of increasing values
-            // since we can't guarantee what order the updates will be executed.
-            // Even with versions, we can't remove the sync because increasing versions does not mean increasing vals.
-            //
-            // NOTE: versioning means we can now remove the sync and tell what update "won"
-            // synchronized (sync) {
-              DocInfo info = model.get(id);
-
-              long val = info.val;
-              long nextVal = Math.abs(val)+1;
-
-              if (oper < commitPercent + deletePercent) {
-                verbose("deleting id",id,"val=",nextVal);
-
-                Long version = deleteAndGetVersion(Integer.toString(id), null);
-                assertTrue(version < 0);
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (Math.abs(version) > Math.abs(currInfo.version)) {
-                    model.put(id, new DocInfo(version, -nextVal));
-                  }
-                }
-
-                verbose("deleting id", id, "val=",nextVal,"DONE");
-              } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
-                verbose("deleteByQyery id",id,"val=",nextVal);
-
-                Long version = deleteByQueryAndGetVersion("id:"+Integer.toString(id), null);
-                assertTrue(version < 0);
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (Math.abs(version) > Math.abs(currInfo.version)) {
-                    model.put(id, new DocInfo(version, -nextVal));
-                  }
-                }
-
-                verbose("deleteByQyery id", id, "val=",nextVal,"DONE");
-              } else {
-                verbose("adding id", id, "val=", nextVal);
-
-                // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
-                Long version = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal)), null);
-                assertTrue(version > 0);
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (version > currInfo.version) {
-                    model.put(id, new DocInfo(version, nextVal));
-                  }
-                }
-
-                if (VERBOSE) {
-                  verbose("adding id", id, "val=", nextVal,"DONE");
-                }
-
-              }
-            // }   // end sync
-
-            if (!before) {
-              lastId = id;
-            }
-          }
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (int i=0; i<nReadThreads; i++) {
-      Thread thread = new Thread("READER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-            while (operations.decrementAndGet() >= 0) {
-              // bias toward a recently changed doc
-              int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
-
-              // when indexing, we update the index, then the model
-              // so when querying, we should first check the model, and then the index
-
-              boolean realTime = rand.nextInt(100) < percentRealtimeQuery;
-              DocInfo info;
-
-              if (realTime) {
-                info = model.get(id);
-              } else {
-                synchronized(TestRealTimeGet.this) {
-                  info = committedModel.get(id);
-                }
-              }
-
-              if (VERBOSE) {
-                verbose("querying id", id);
-              }
-              SolrQueryRequest sreq;
-              if (realTime) {
-                sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id));
-              } else {
-                sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true");
-              }
-
-              String response = h.query(sreq);
-              Map rsp = (Map)ObjectBuilder.fromJSON(response);
-              List doclist = (List)(((Map)rsp.get("response")).get("docs"));
-              if (doclist.size() == 0) {
-                // there's no info we can get back with a delete, so not much we can check without further synchronization
-              } else {
-                assertEquals(1, doclist.size());
-                long foundVal = (Long)(((Map)doclist.get(0)).get(field));
-                long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
-                if (foundVer < Math.abs(info.version)
-                    || (foundVer == info.version && foundVal != info.val) ) {    // if the version matches, the val must
-                  verbose("ERROR, id=", id, "found=",response,"model",info);
-                  assertTrue(false);
-                }
-              }
-            }
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (Thread thread : threads) {
-      thread.start();
-    }
-
-    for (Thread thread : threads) {
-      thread.join();
-    }
-
-  }
-
-  // This version simulates updates coming from the leader and sometimes being reordered
-  @Test
-  public void testStressReorderVersions() throws Exception {
-    clearIndex();
-    assertU(commit());
-
-    final int commitPercent = 5 + random().nextInt(20);
-    final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft
-    final int deletePercent = 4+random().nextInt(25);
-    final int deleteByQueryPercent = 0;  // delete-by-query can't be reordered on replicas
-    final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
-    int nWriteThreads = 5 + random().nextInt(25);
-
-    final int maxConcurrentCommits = nWriteThreads;   // number of committers at a time... it should be <= maxWarmingSearchers
-
-        // query variables
-    final int percentRealtimeQuery = 75;
-    final AtomicLong operations = new AtomicLong(50000);  // number of query operations to perform in total
-    int nReadThreads = 5 + random().nextInt(25);
-
-    initModel(ndocs);
-
-    final AtomicInteger numCommitting = new AtomicInteger();
-
-    List<Thread> threads = new ArrayList<Thread>();
-
-
-    final AtomicLong testVersion = new AtomicLong(0);
-
-    for (int i=0; i<nWriteThreads; i++) {
-      Thread thread = new Thread("WRITER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-          while (operations.get() > 0) {
-            int oper = rand.nextInt(100);
-
-            if (oper < commitPercent) {
-              if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
-                Map<Integer,DocInfo> newCommittedModel;
-                long version;
-
-                synchronized(TestRealTimeGet.this) {
-                  newCommittedModel = new HashMap<Integer,DocInfo>(model);  // take a snapshot
-                  version = snapshotCount++;
-                }
-
-                if (rand.nextInt(100) < softCommitPercent) {
-                  verbose("softCommit start");
-                  assertU(TestHarness.commit("softCommit","true"));
-                  verbose("softCommit end");
-                } else {
-                  verbose("hardCommit start");
-                  assertU(commit());
-                  verbose("hardCommit end");
-                }
-
-                synchronized(TestRealTimeGet.this) {
-                  // install this model snapshot only if it's newer than the current one
-                  if (version >= committedModelClock) {
-                    if (VERBOSE) {
-                      verbose("installing new committedModel version="+committedModelClock);
-                    }
-                    committedModel = newCommittedModel;
-                    committedModelClock = version;
-                  }
-                }
-              }
-              numCommitting.decrementAndGet();
-              continue;
-            }
-
-
-            int id;
-
-            if (rand.nextBoolean()) {
-              id = rand.nextInt(ndocs);
-            } else {
-              id = lastId;  // reuse the last ID half of the time to force more race conditions
-            }
-
-            // set the lastId before we actually change it sometimes to try and
-            // uncover more race conditions between writing and reading
-            boolean before = rand.nextBoolean();
-            if (before) {
-              lastId = id;
-            }
-
-            DocInfo info = model.get(id);
-
-            long val = info.val;
-            long nextVal = Math.abs(val)+1;
-
-            // the version we set on the update should determine who wins
-            // These versions are not derived from the actual leader update handler and hence this
-            // test may need to change depending on how we handle version numbers.
-            long version = testVersion.incrementAndGet();
-
-            // yield after getting the next version to increase the odds of updates happening out of order
-            if (rand.nextBoolean()) Thread.yield();
-
-              if (oper < commitPercent + deletePercent) {
-                verbose("deleting id",id,"val=",nextVal,"version",version);
-
-                Long returnedVersion = deleteAndGetVersion(Integer.toString(id), params("_version_",Long.toString(-version), DISTRIB_UPDATE_PARAM,FROM_LEADER));
-
-                // TODO: returning versions for these types of updates is redundant
-                // but if we do return, they had better be equal
-                if (returnedVersion != null) {
-                  assertEquals(-version, returnedVersion.longValue());
-                }
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (Math.abs(version) > Math.abs(currInfo.version)) {
-                    model.put(id, new DocInfo(version, -nextVal));
-                  }
-                }
-
-                verbose("deleting id", id, "val=",nextVal,"version",version,"DONE");
-              } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
-
-              } else {
-                verbose("adding id", id, "val=", nextVal,"version",version);
-
-                Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-                if (returnedVersion != null) {
-                  assertEquals(version, returnedVersion.longValue());
-                }
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (version > currInfo.version) {
-                    model.put(id, new DocInfo(version, nextVal));
-                  }
-                }
-
-                if (VERBOSE) {
-                  verbose("adding id", id, "val=", nextVal,"version",version,"DONE");
-                }
-
-              }
-            // }   // end sync
-
-            if (!before) {
-              lastId = id;
-            }
-          }
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (int i=0; i<nReadThreads; i++) {
-      Thread thread = new Thread("READER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-            while (operations.decrementAndGet() >= 0) {
-              // bias toward a recently changed doc
-              int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
-
-              // when indexing, we update the index, then the model
-              // so when querying, we should first check the model, and then the index
-
-              boolean realTime = rand.nextInt(100) < percentRealtimeQuery;
-              DocInfo info;
-
-              if (realTime) {
-                info = model.get(id);
-              } else {
-                synchronized(TestRealTimeGet.this) {
-                  info = committedModel.get(id);
-                }
-              }
-
-              if (VERBOSE) {
-                verbose("querying id", id);
-              }
-              SolrQueryRequest sreq;
-              if (realTime) {
-                sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id));
-              } else {
-                sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true");
-              }
-
-              String response = h.query(sreq);
-              Map rsp = (Map)ObjectBuilder.fromJSON(response);
-              List doclist = (List)(((Map)rsp.get("response")).get("docs"));
-              if (doclist.size() == 0) {
-                // there's no info we can get back with a delete, so not much we can check without further synchronization
-              } else {
-                assertEquals(1, doclist.size());
-                long foundVal = (Long)(((Map)doclist.get(0)).get(field));
-                long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
-                if (foundVer < Math.abs(info.version)
-                    || (foundVer == info.version && foundVal != info.val) ) {    // if the version matches, the val must
-                  verbose("ERROR, id=", id, "found=",response,"model",info);
-                  assertTrue(false);
-                }
-              }
-            }
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (Thread thread : threads) {
-      thread.start();
-    }
-
-    for (Thread thread : threads) {
-      thread.join();
-    }
-
-  }
-
-
-
-
-
-
-
-  // This points to the live model when state is ACTIVE, but a snapshot of the
-  // past when recovering.
-  volatile ConcurrentHashMap<Integer,DocInfo> visibleModel;
-
-  // This version simulates updates coming from the leader and sometimes being reordered
-  // and tests the ability to buffer updates and apply them later
-  @Test
-  public void testStressRecovery() throws Exception {
-    assumeFalse("FIXME: This test is horribly slow sometimes on Windows!", Constants.WINDOWS);
-    clearIndex();
-    assertU(commit());
-
-    final int commitPercent = 5 + random().nextInt(10);
-    final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft
-    final int deletePercent = 4+random().nextInt(25);
-    final int deleteByQueryPercent = 0;  // real-time get isn't currently supported with delete-by-query
-    final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
-    int nWriteThreads = 2 + random().nextInt(10);  // fewer write threads to give recovery thread more of a chance
-
-    final int maxConcurrentCommits = nWriteThreads;   // number of committers at a time... it should be <= maxWarmingSearchers
-
-        // query variables
-    final int percentRealtimeQuery = 75;
-    final int percentGetLatestVersions = random().nextInt(4);
-    final AtomicLong operations = new AtomicLong(atLeast(75));  // number of recovery loops to perform
-    int nReadThreads = 2 + random().nextInt(10);  // fewer read threads to give writers more of a chance
-
-    initModel(ndocs);
-
-    final AtomicInteger numCommitting = new AtomicInteger();
-
-    List<Thread> threads = new ArrayList<Thread>();
-
-
-    final AtomicLong testVersion = new AtomicLong(0);
-
-
-    final UpdateHandler uHandler = h.getCore().getUpdateHandler();
-    final UpdateLog uLog = uHandler.getUpdateLog();
-    final VersionInfo vInfo = uLog.getVersionInfo();
-    final Object stateChangeLock = new Object();
-    this.visibleModel = model;
-    final Semaphore[] writePermissions = new Semaphore[nWriteThreads];
-    for (int i=0; i<nWriteThreads; i++) writePermissions[i] = new Semaphore(Integer.MAX_VALUE, false);
-
-    final Semaphore readPermission = new Semaphore(Integer.MAX_VALUE, false);
-
-    for (int i=0; i<nWriteThreads; i++) {
-      final int threadNum = i;
-
-      Thread thread = new Thread("WRITER"+i) {
-        Random rand = new Random(random().nextInt());
-        Semaphore writePermission = writePermissions[threadNum];
-
-        @Override
-        public void run() {
-          try {
-          while (operations.get() > 0) {
-            writePermission.acquire();
-
-            int oper = rand.nextInt(10);
-
-            if (oper < commitPercent) {
-              if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
-                Map<Integer,DocInfo> newCommittedModel;
-                long version;
-
-                synchronized(TestRealTimeGet.this) {
-                  newCommittedModel = new HashMap<Integer,DocInfo>(model);  // take a snapshot
-                  version = snapshotCount++;
-                }
-
-                synchronized (stateChangeLock) {
-                  // These commits won't take effect if we are in recovery mode,
-                  // so change the version to -1 so we won't update our model.
-                  if (uLog.getState() != UpdateLog.State.ACTIVE) version = -1;
-                  if (rand.nextInt(100) < softCommitPercent) {
-                    verbose("softCommit start");
-                    assertU(TestHarness.commit("softCommit","true"));
-                    verbose("softCommit end");
-                  } else {
-                    verbose("hardCommit start");
-                    assertU(commit());
-                    verbose("hardCommit end");
-                  }
-                }
-
-                synchronized(TestRealTimeGet.this) {
-                  // install this model snapshot only if it's newer than the current one
-                  // install this model only if we are not in recovery mode.
-                  if (version >= committedModelClock) {
-                    if (VERBOSE) {
-                      verbose("installing new committedModel version="+committedModelClock);
-                    }
-                    committedModel = newCommittedModel;
-                    committedModelClock = version;
-                  }
-                }
-              }
-              numCommitting.decrementAndGet();
-              continue;
-            }
-
-
-            int id;
-
-            if (rand.nextBoolean()) {
-              id = rand.nextInt(ndocs);
-            } else {
-              id = lastId;  // reuse the last ID half of the time to force more race conditions
-            }
-
-            // set the lastId before we actually change it sometimes to try and
-            // uncover more race conditions between writing and reading
-            boolean before = rand.nextBoolean();
-            if (before) {
-              lastId = id;
-            }
-
-            DocInfo info = model.get(id);
-
-            long val = info.val;
-            long nextVal = Math.abs(val)+1;
-
-            // the version we set on the update should determine who wins
-            // These versions are not derived from the actual leader update handler and hence this
-            // test may need to change depending on how we handle version numbers.
-            long version = testVersion.incrementAndGet();
-
-            // yield after getting the next version to increase the odds of updates happening out of order
-            if (rand.nextBoolean()) Thread.yield();
-
-              if (oper < commitPercent + deletePercent) {
-                verbose("deleting id",id,"val=",nextVal,"version",version);
-
-                Long returnedVersion = deleteAndGetVersion(Integer.toString(id), params("_version_",Long.toString(-version), DISTRIB_UPDATE_PARAM,FROM_LEADER));
-
-                // TODO: returning versions for these types of updates is redundant
-                // but if we do return, they had better be equal
-                if (returnedVersion != null) {
-                  assertEquals(-version, returnedVersion.longValue());
-                }
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (Math.abs(version) > Math.abs(currInfo.version)) {
-                    model.put(id, new DocInfo(version, -nextVal));
-                  }
-                }
-
-                verbose("deleting id", id, "val=",nextVal,"version",version,"DONE");
-              } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
-
-              } else {
-                verbose("adding id", id, "val=", nextVal,"version",version);
-
-                Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-                if (returnedVersion != null) {
-                  assertEquals(version, returnedVersion.longValue());
-                }
-
-                // only update model if the version is newer
-                synchronized (model) {
-                  DocInfo currInfo = model.get(id);
-                  if (version > currInfo.version) {
-                    model.put(id, new DocInfo(version, nextVal));
-                  }
-                }
-
-                if (VERBOSE) {
-                  verbose("adding id", id, "val=", nextVal,"version",version,"DONE");
-                }
-
-              }
-            // }   // end sync
-
-            if (!before) {
-              lastId = id;
-            }
-          }
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (int i=0; i<nReadThreads; i++) {
-      Thread thread = new Thread("READER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-            while (operations.get() > 0) {
-              // throttle reads (don't completely stop)
-              readPermission.tryAcquire(10, TimeUnit.MILLISECONDS);
-
-
-              // bias toward a recently changed doc
-              int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
-
-              // when indexing, we update the index, then the model
-              // so when querying, we should first check the model, and then the index
-
-              boolean realTime = rand.nextInt(100) < percentRealtimeQuery;
-              DocInfo info;
-
-              if (realTime) {
-                info = visibleModel.get(id);
-              } else {
-                synchronized(TestRealTimeGet.this) {
-                  info = committedModel.get(id);
-                }
-              }
-
-
-              if  (VERBOSE) {
-                verbose("querying id", id);
-              }
-              SolrQueryRequest sreq;
-              if (realTime) {
-                sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id));
-              } else {
-                sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true");
-              }
-
-              String response = h.query(sreq);
-              Map rsp = (Map)ObjectBuilder.fromJSON(response);
-              List doclist = (List)(((Map)rsp.get("response")).get("docs"));
-              if (doclist.size() == 0) {
-                // there's no info we can get back with a delete, so not much we can check without further synchronization
-              } else {
-                assertEquals(1, doclist.size());
-                long foundVal = (Long)(((Map)doclist.get(0)).get(field));
-                long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
-                if (foundVer < Math.abs(info.version)
-                    || (foundVer == info.version && foundVal != info.val) ) {    // if the version matches, the val must
-                  verbose("ERROR, id=", id, "found=",response,"model",info);
-                  assertTrue(false);
-                }
-              }
-            }
-            
-            
-            if (rand.nextInt(100) < percentGetLatestVersions) {
-              getLatestVersions();
-              // TODO: some sort of validation that the latest version is >= to the latest version we added?
-            }
-
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (Thread thread : threads) {
-      thread.start();
-    }
-
-    int bufferedAddsApplied = 0;
-    do {
-      assertTrue(uLog.getState() == UpdateLog.State.ACTIVE);
-
-      // before we start buffering updates, we want to point
-      // visibleModel away from the live model.
-
-      visibleModel = new ConcurrentHashMap<Integer, DocInfo>(model);
-
-      synchronized (stateChangeLock) {
-        uLog.bufferUpdates();
-      }
-
-      assertTrue(uLog.getState() == UpdateLog.State.BUFFERING);
-
-      // sometimes wait for a second to allow time for writers to write something
-      if (random().nextBoolean()) Thread.sleep(random().nextInt(10)+1);
-
-      Future<UpdateLog.RecoveryInfo> recoveryInfoF = uLog.applyBufferedUpdates();
-      if (recoveryInfoF != null) {
-        UpdateLog.RecoveryInfo recInfo = null;
-
-        int writeThreadNumber = 0;
-        while (recInfo == null) {
-          try {
-            // wait a short period of time for recovery to complete (and to give a chance for more writers to concurrently add docs)
-            recInfo = recoveryInfoF.get(random().nextInt(100/nWriteThreads), TimeUnit.MILLISECONDS);
-          } catch (TimeoutException e) {
-            // idle one more write thread
-            verbose("Operation",operations.get(),"Draining permits for write thread",writeThreadNumber);
-            writePermissions[writeThreadNumber++].drainPermits();
-            if (writeThreadNumber >= nWriteThreads) {
-              // if we hit the end, back up and give a few write permits
-              writeThreadNumber--;
-              writePermissions[writeThreadNumber].release(random().nextInt(2) + 1);
-            }
-
-            // throttle readers so they don't steal too much CPU from the recovery thread
-            readPermission.drainPermits();
-          }
-        }
-
-        bufferedAddsApplied += recInfo.adds;
-      }
-
-      // put all writers back at full blast
-      for (Semaphore writePerm : writePermissions) {
-        // I don't think semaphores check for overflow, so we need to check how many remain
-        int neededPermits = Integer.MAX_VALUE - writePerm.availablePermits();
-        if (neededPermits > 0) writePerm.release( neededPermits );
-      }
-
-      // put back readers at full blast and point back to live model
-      visibleModel = model;
-      int neededPermits = Integer.MAX_VALUE - readPermission.availablePermits();
-      if (neededPermits > 0) readPermission.release( neededPermits );
-
-      verbose("ROUND=",operations.get());
-    } while (operations.decrementAndGet() > 0);
-
-    verbose("bufferedAddsApplied=",bufferedAddsApplied);
-
-    for (Thread thread : threads) {
-      thread.join();
-    }
-
-  }
-
-
-  List<Long> getLatestVersions() {
-    List<Long> recentVersions;
-    UpdateLog.RecentUpdates startingRecentUpdates = h.getCore().getUpdateHandler().getUpdateLog().getRecentUpdates();
-    try {
-      recentVersions = startingRecentUpdates.getVersions(100);
-    } finally {
-      startingRecentUpdates.close();
-    }
-    return recentVersions;
-  }
-
-
-
-
-
-
-  // The purpose of this test is to roughly model how solr uses lucene
-  DirectoryReader reader;
-  @Test
-  public void testStressLuceneNRT() throws Exception {
-    final int commitPercent = 5 + random().nextInt(20);
-    final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft
-    final int deletePercent = 4+random().nextInt(25);
-    final int deleteByQueryPercent = 1+random().nextInt(5);
-    final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
-    int nWriteThreads = 5 + random().nextInt(25);
-
-    final int maxConcurrentCommits = nWriteThreads;   // number of committers at a time... it should be <= maxWarmingSearchers
-
-    final AtomicLong operations = new AtomicLong(1000);  // number of query operations to perform in total - crank up if
-    int nReadThreads = 5 + random().nextInt(25);
-    final boolean tombstones = random().nextBoolean();
-    final boolean syncCommits = random().nextBoolean();
-
-    verbose("commitPercent=", commitPercent);
-    verbose("softCommitPercent=",softCommitPercent);
-    verbose("deletePercent=",deletePercent);
-    verbose("deleteByQueryPercent=", deleteByQueryPercent);
-    verbose("ndocs=", ndocs);
-    verbose("nWriteThreads=", nWriteThreads);
-    verbose("nReadThreads=", nReadThreads);
-    verbose("maxConcurrentCommits=", maxConcurrentCommits);
-    verbose("operations=", operations);
-    verbose("tombstones=", tombstones);
-    verbose("syncCommits=", syncCommits);
-
-    initModel(ndocs);
-
-    final AtomicInteger numCommitting = new AtomicInteger();
-
-    List<Thread> threads = new ArrayList<Thread>();
-
-
-    final FieldType idFt = new FieldType();
-    idFt.setIndexed(true);
-    idFt.setStored(true);
-    idFt.setOmitNorms(true);
-    idFt.setTokenized(false);
-    idFt.setIndexOptions(FieldInfo.IndexOptions.DOCS_ONLY);
-
-    final FieldType ft2 = new FieldType();
-    ft2.setIndexed(false);
-    ft2.setStored(true);
-
-
-    // model how solr does locking - only allow one thread to do a hard commit at once, and only one thread to do a soft commit, but
-    // a hard commit in progress does not stop a soft commit.
-    final Lock hardCommitLock = syncCommits ? new ReentrantLock() : null;
-    final Lock reopenLock = syncCommits ? new ReentrantLock() : null;
-
-
-    // RAMDirectory dir = new RAMDirectory();
-    // final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_40, new WhitespaceAnalyzer(Version.LUCENE_40)));
-
-    Directory dir = newDirectory();
-
-    final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
-    writer.setDoRandomForceMergeAssert(false);
-
-    // writer.commit();
-    // reader = IndexReader.open(dir);
-    // make this reader an NRT reader from the start to avoid the first non-writer openIfChanged
-    // from opening only at the last commit point.
-    reader = DirectoryReader.open(writer.w, true);
-
-    for (int i=0; i<nWriteThreads; i++) {
-      Thread thread = new Thread("WRITER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-            while (operations.get() > 0) {
-              int oper = rand.nextInt(100);
-
-              if (oper < commitPercent) {
-                if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
-                  Map<Integer,DocInfo> newCommittedModel;
-                  long version;
-                  DirectoryReader oldReader;
-
-                  boolean softCommit = rand.nextInt(100) < softCommitPercent;
-
-                  if (!softCommit) {
-                    // only allow one hard commit to proceed at once
-                    if (hardCommitLock != null) hardCommitLock.lock();
-                    verbose("hardCommit start");
-
-                    writer.commit();
-                  }
-
-                  if (reopenLock != null) reopenLock.lock();
-
-                  synchronized(TestRealTimeGet.this) {
-                    newCommittedModel = new HashMap<Integer,DocInfo>(model);  // take a snapshot
-                    version = snapshotCount++;
-                    oldReader = reader;
-                    oldReader.incRef();  // increment the reference since we will use this for reopening
-                  }
-
-                  if (!softCommit) {
-                    // must commit after taking a snapshot of the model
-                    // writer.commit();
-                  }
-
-                  verbose("reopen start using", oldReader);
-
-                  DirectoryReader newReader;
-                  if (softCommit) {
-                    newReader = DirectoryReader.openIfChanged(oldReader, writer.w, true);
-                  } else {
-                    // will only open to last commit
-                   newReader = DirectoryReader.openIfChanged(oldReader);
-                  }
-
-
-                  if (newReader == null) {
-                    oldReader.incRef();
-                    newReader = oldReader;
-                  }
-                  oldReader.decRef();
-
-                  verbose("reopen result", newReader);
-
-                  synchronized(TestRealTimeGet.this) {
-                    assert newReader.getRefCount() > 0;
-                    assert reader.getRefCount() > 0;
-
-                    // install the new reader if it's newest (and check the current version since another reader may have already been installed)
-                    if (newReader.getVersion() > reader.getVersion()) {
-                      reader.decRef();
-                      reader = newReader;
-
-                      // install this snapshot only if it's newer than the current one
-                      if (version >= committedModelClock) {
-                        committedModel = newCommittedModel;
-                        committedModelClock = version;
-                      }
-
-                    } else {
-                      // close if unused
-                      newReader.decRef();
-                    }
-
-                  }
-
-                  if (reopenLock != null) reopenLock.unlock();
-
-                  if (!softCommit) {
-                    if (hardCommitLock != null) hardCommitLock.unlock();
-                  }
-
-                }
-                numCommitting.decrementAndGet();
-                continue;
-              }
-
-
-              int id = rand.nextInt(ndocs);
-              Object sync = syncArr[id];
-
-              // set the lastId before we actually change it sometimes to try and
-              // uncover more race conditions between writing and reading
-              boolean before = rand.nextBoolean();
-              if (before) {
-                lastId = id;
-              }
-
-              // We can't concurrently update the same document and retain our invariants of increasing values
-              // since we can't guarantee what order the updates will be executed.
-              synchronized (sync) {
-                DocInfo info = model.get(id);
-                long val = info.val;
-                long nextVal = Math.abs(val)+1;
-
-                if (oper < commitPercent + deletePercent) {
-                  // add tombstone first
-                  if (tombstones) {
-                    Document d = new Document();
-                    d.add(new Field("id","-"+Integer.toString(id), idFt));
-                    d.add(new Field(field, Long.toString(nextVal), ft2));
-                    verbose("adding tombstone for id",id,"val=",nextVal);
-                    writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d);
-                  }
-
-                  verbose("deleting id",id,"val=",nextVal);
-                  writer.deleteDocuments(new Term("id",Integer.toString(id)));
-                  model.put(id, new DocInfo(0,-nextVal));
-                  verbose("deleting id",id,"val=",nextVal,"DONE");
-
-                } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
-                  //assertU("<delete><query>id:" + id + "</query></delete>");
-
-                  // add tombstone first
-                  if (tombstones) {
-                    Document d = new Document();
-                    d.add(new Field("id","-"+Integer.toString(id), idFt));
-                    d.add(new Field(field, Long.toString(nextVal), ft2));
-                    verbose("adding tombstone for id",id,"val=",nextVal);
-                    writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d);
-                  }
-
-                  verbose("deleteByQuery",id,"val=",nextVal);
-                  writer.deleteDocuments(new TermQuery(new Term("id", Integer.toString(id))));
-                  model.put(id, new DocInfo(0,-nextVal));
-                  verbose("deleteByQuery",id,"val=",nextVal,"DONE");
-                } else {
-                  // model.put(id, nextVal);   // uncomment this and this test should fail.
-
-                  // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
-                  Document d = new Document();
-                  d.add(new Field("id",Integer.toString(id), idFt));
-                  d.add(new Field(field, Long.toString(nextVal), ft2));
-                  verbose("adding id",id,"val=",nextVal);
-                  writer.updateDocument(new Term("id", Integer.toString(id)), d);
-                  if (tombstones) {
-                    // remove tombstone after new addition (this should be optional?)
-                    verbose("deleting tombstone for id",id);
-                    writer.deleteDocuments(new Term("id","-"+Integer.toString(id)));
-                    verbose("deleting tombstone for id",id,"DONE");
-                  }
-
-                  model.put(id, new DocInfo(0,nextVal));
-                  verbose("adding id",id,"val=",nextVal,"DONE");
-                }
-              }
-
-              if (!before) {
-                lastId = id;
-              }
-            }
-          } catch (Exception  ex) {
-            throw new RuntimeException(ex);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (int i=0; i<nReadThreads; i++) {
-      Thread thread = new Thread("READER"+i) {
-        Random rand = new Random(random().nextInt());
-
-        @Override
-        public void run() {
-          try {
-            while (operations.decrementAndGet() >= 0) {
-              // bias toward a recently changed doc
-              int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
-
-              // when indexing, we update the index, then the model
-              // so when querying, we should first check the model, and then the index
-
-              DocInfo info;
-              synchronized(TestRealTimeGet.this) {
-                info = committedModel.get(id);
-              }
-              long val = info.val;
-
-              IndexReader r;
-              synchronized(TestRealTimeGet.this) {
-                r = reader;
-                r.incRef();
-              }
-
-              int docid = getFirstMatch(r, new Term("id",Integer.toString(id)));
-
-              if (docid < 0 && tombstones) {
-                // if we couldn't find the doc, look for its tombstone
-                docid = getFirstMatch(r, new Term("id","-"+Integer.toString(id)));
-                if (docid < 0) {
-                  if (val == -1L) {
-                    // expected... no doc was added yet
-                    r.decRef();
-                    continue;
-                  }
-                  verbose("ERROR: Couldn't find a doc  or tombstone for id", id, "using reader",r,"expected value",val);
-                  fail("No documents or tombstones found for id " + id + ", expected at least " + val);
-                }
-              }
-
-              if (docid < 0 && !tombstones) {
-                // nothing to do - we can't tell anything from a deleted doc without tombstones
-              } else {
-                if (docid < 0) {
-                  verbose("ERROR: Couldn't find a doc for id", id, "using reader",r);
-                }
-                assertTrue(docid >= 0);   // we should have found the document, or it's tombstone
-                Document doc = r.document(docid);
-                long foundVal = Long.parseLong(doc.get(field));
-                if (foundVal < Math.abs(val)) {
-                  verbose("ERROR: id",id,"model_val=",val," foundVal=",foundVal,"reader=",reader);
-                }
-                assertTrue(foundVal >= Math.abs(val));
-              }
-
-              r.decRef();
-            }
-          } catch (Throwable e) {
-            operations.set(-1L);
-            throw new RuntimeException(e);
-          }
-        }
-      };
-
-      threads.add(thread);
-    }
-
-
-    for (Thread thread : threads) {
-      thread.start();
-    }
-
-    for (Thread thread : threads) {
-      thread.join();
-    }
-
-    writer.close();
-    reader.close();
-    dir.close();
-  }
-
-
-  public int getFirstMatch(IndexReader r, Term t) throws IOException {
-    Fields fields = MultiFields.getFields(r);
-    if (fields == null) return -1;
-    Terms terms = fields.terms(t.field());
-    if (terms == null) return -1;
-    BytesRef termBytes = t.bytes();
-    final TermsEnum termsEnum = terms.iterator(null);
-    if (!termsEnum.seekExact(termBytes, false)) {
-      return -1;
-    }
-    DocsEnum docs = termsEnum.docs(MultiFields.getLiveDocs(r), null, false);
-    int id = docs.nextDoc();
-    if (id != DocIdSetIterator.NO_MORE_DOCS) {
-      int next = docs.nextDoc();
-      assertEquals(DocIdSetIterator.NO_MORE_DOCS, next);
-    }
-    return id == DocIdSetIterator.NO_MORE_DOCS ? -1 : id;
-  }
-
 }
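
The stress tests removed above (presumably relocated elsewhere in this multi-part commit, alongside the new TestRTGBase base class) all enforce the same convergence rule: whichever update carries the newer version wins, regardless of arrival order, with deletes encoded as negative versions and compared by absolute value. A minimal standalone sketch of that rule, assuming nothing beyond the JDK (class and method names are illustrative, not from the commit):

import java.util.concurrent.ConcurrentHashMap;

// Sketch of the versioned last-write-wins model used throughout these tests:
// deletes carry negative versions, and an incoming update only replaces the
// model entry when its absolute version is newer, so reordered updates
// converge to the same final state.
class VersionedModelSketch {
  static final class DocInfo {
    final long version;  // negative for a delete
    final long val;
    DocInfo(long version, long val) { this.version = version; this.val = val; }
  }

  final ConcurrentHashMap<Integer, DocInfo> model = new ConcurrentHashMap<>();

  // ConcurrentHashMap.merge is atomic, so the explicit synchronized(model)
  // blocks the tests wrap around get-then-put are not needed in this form.
  void apply(int id, long version, long val) {
    model.merge(id, new DocInfo(version, val),
        (cur, next) -> Math.abs(next.version) > Math.abs(cur.version) ? next : cur);
  }
}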

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java Thu Jul 19 15:58:54 2012
@@ -17,24 +17,25 @@
 package org.apache.solr.search;
 
 
-import org.apache.lucene.util.BytesRef;
-import org.apache.noggit.JSONUtil;
 import org.apache.noggit.ObjectBuilder;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.util.ByteUtils;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.UpdateHandler;
-import org.apache.solr.update.UpdateLog;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import java.io.File;
 import java.io.RandomAccessFile;
-import java.util.*;
+import java.util.ArrayDeque;
+import java.util.Arrays;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 import java.util.concurrent.Future;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
@@ -60,7 +61,7 @@ public class TestRecovery extends SolrTe
   }
   
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     if (savedFactory == null) {
       System.clearProperty("solr.directoryFactory");
     } else {
@@ -68,6 +69,19 @@ public class TestRecovery extends SolrTe
     }
   }
 
+
+  // since we make up fake versions in these tests, a DBQ stamped with a real version can trip us up,
+  // because Solr may then treat subsequent fake-versioned updates as reordered and drop them.
+  @Override
+  public void clearIndex() {
+    try {
+      deleteByQueryAndGetVersion("*:*", params("_version_", Long.toString(-Long.MAX_VALUE), DISTRIB_UPDATE_PARAM,FROM_LEADER));
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+
   @Test
   public void testLogReplay() throws Exception {
     try {
@@ -892,7 +906,7 @@ public class TestRecovery extends SolrTe
 
       // WARNING... assumes format of .00000n where n is less than 9
       long logNumber = Long.parseLong(fname.substring(fname.lastIndexOf(".") + 1));
-      String fname2 = String.format(Locale.ENGLISH, 
+      String fname2 = String.format(Locale.ROOT,
           UpdateLog.LOG_FILENAME_PATTERN,
           UpdateLog.TLOG_NAME,
           logNumber + 1);
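
A note on the new clearIndex() override near the top of this file's diff: these tests hand-pick small _version_ values, while a normal *:* delete would be stamped with a genuine version, which Solr derives from a timestamp and which therefore dwarfs the invented ones. After such a delete, later fake-versioned updates can look older than the delete and get dropped. A rough illustration of the mismatch (the exact version layout is an assumption here, not shown in this diff):

public class FakeVersionHazardSketch {
  public static void main(String[] args) {
    // Rough shape of a real Solr version: a millisecond timestamp in the high bits.
    long realDbqVersion = System.currentTimeMillis() << 20;
    long fakeAddVersion = 100;  // the kind of version these tests invent

    // An add whose version is below an already-seen delete's version looks like
    // it happened before that delete, so a replica may discard it.
    System.out.println("add looks older than the delete: "
        + (Math.abs(fakeAddVersion) < Math.abs(realDbqVersion)));  // true
  }
}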

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java Thu Jul 19 15:58:54 2012
@@ -50,7 +50,7 @@ public class TestSearchPerf extends Abst
   }
 
   String t(int tnum) {
-    return String.format("%08d", tnum);
+    return String.format(Locale.ROOT, "%08d", tnum);
   }
 
   Random r = new Random(0);  // specific seed for reproducible perf testing
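
Two details hide in that one-line change. The Lucene test framework randomizes the JVM default locale per run, so the bare String.format could emit locale-specific digit characters and make the generated terms vary between runs; Locale.ROOT pins the output. The zero padding matters too: fixed-width terms keep lexicographic order aligned with numeric order. A small self-contained check (illustrative, not part of the commit):

import java.util.Locale;

public class PaddedTermSketch {
  public static void main(String[] args) {
    String a = String.format(Locale.ROOT, "%08d", 99);   // "00000099"
    String b = String.format(Locale.ROOT, "%08d", 100);  // "00000100"
    System.out.println(a.compareTo(b) < 0);        // true: padded terms sort numerically
    System.out.println("99".compareTo("100") < 0); // false: unpadded terms do not
  }
}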

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSort.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSort.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSort.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSort.java Thu Jul 19 15:58:54 2012
@@ -203,7 +203,7 @@ public class TestSort extends SolrTestCa
       for (int i=0; i<qiter; i++) {
         Filter filt = new Filter() {
           @Override
-          public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
+          public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
             return BitsFilteredDocIdSet.wrap(randSet(context.reader().maxDoc()), acceptDocs);
           }
         };

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/similarities/TestDefaultSimilarityFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/similarities/TestDefaultSimilarityFactory.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/similarities/TestDefaultSimilarityFactory.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/similarities/TestDefaultSimilarityFactory.java Thu Jul 19 15:58:54 2012
@@ -18,6 +18,7 @@ package org.apache.solr.search.similarit
  */
 
 import org.apache.lucene.search.similarities.DefaultSimilarity;
+import org.apache.lucene.search.similarities.Similarity;
 import org.junit.BeforeClass;
 
 /**
@@ -30,7 +31,16 @@ public class TestDefaultSimilarityFactor
   }
   
   /** default parameters */
-  public void test() throws Exception {
-    assertEquals(DefaultSimilarity.class, getSimilarity("text").getClass());
+  public void testDefaults() throws Exception {
+    Similarity sim = getSimilarity("text");
+    assertEquals(DefaultSimilarity.class, sim.getClass());
+    assertEquals(true, ((DefaultSimilarity)sim).getDiscountOverlaps());
   }
+  /** explicit params */
+  public void testParams() throws Exception {
+    Similarity sim = getSimilarity("text_overlap");
+    assertEquals(DefaultSimilarity.class, sim.getClass());
+    assertEquals(false, ((DefaultSimilarity)sim).getDiscountOverlaps());
+  }
+
 }

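For reference, the new testParams case reads a discountOverlaps setting off the
factory-built similarity; such a setting would normally come from the test
schema. A sketch of what such an entry could look like -- the text_overlap name
comes from the test above, the XML itself is illustrative, not quoted from the
schema file:

  <fieldType name="text_overlap" class="solr.TextField" positionIncrementGap="100">
    <analyzer>
      <tokenizer class="solr.WhitespaceTokenizerFactory"/>
    </analyzer>
    <similarity class="solr.DefaultSimilarityFactory">
      <bool name="discountOverlaps">false</bool>
    </similarity>
  </fieldType>
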
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java Thu Jul 19 15:58:54 2012
@@ -31,6 +31,8 @@ import org.apache.solr.common.params.Com
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import org.apache.lucene.util._TestUtil;
+
 /**
  * A test case for the several HTTP cache headers emitted by Solr
  */
@@ -54,6 +56,7 @@ public class CacheHeaderTest extends Cac
     HttpResponse response = getClient().execute(m);
     assertEquals(200, response.getStatusLine().getStatusCode());
     checkVetoHeaders(response, true);
+    f.delete();
   }
   
   @Test
@@ -243,8 +246,7 @@ public class CacheHeaderTest extends Cac
 
   protected File makeFile(String contents, String charset) {
     try {
-      File f = File.createTempFile(getClass().getName(),"csv");
-      f.deleteOnExit();
+      File f = _TestUtil.getTempDir("cachetest_csv");
       Writer out = new OutputStreamWriter(new FileOutputStream(f),
           charset);
       out.write(contents);

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java Thu Jul 19 15:58:54 2012
@@ -33,7 +33,7 @@ public class NoCacheHeaderTest extends C
   // as its home. it could interfere with other tests!
   @BeforeClass
   public static void beforeTest() throws Exception {
-    createJetty(TEST_HOME(), "solr/conf/solrconfig-nocache.xml", null);
+    createJetty(TEST_HOME(), "solr/collection1/conf/solrconfig-nocache.xml", null);
   }
 
   // The tests

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java Thu Jul 19 15:58:54 2012
@@ -54,7 +54,7 @@ public class SolrRequestParserTest exten
   static SolrRequestParsers parser;
 
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     parser = null;
   }
   

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java Thu Jul 19 15:58:54 2012
@@ -54,7 +54,7 @@ public class FileBasedSpellCheckerTest e
   }
   
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     queryConverter = null;
   }
 

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java Thu Jul 19 15:58:54 2012
@@ -76,7 +76,7 @@ public class IndexBasedSpellCheckerTest 
   }
   
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     queryConverter = null;
   }
 

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java Thu Jul 19 15:58:54 2012
@@ -20,6 +20,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.GroupParams;
@@ -37,6 +38,7 @@ import org.apache.solr.response.SolrQuer
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+@Slow
 public class SpellCheckCollatorTest extends SolrTestCaseJ4 {
 	@BeforeClass
 	public static void beforeClass() throws Exception {

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java Thu Jul 19 15:58:54 2012
@@ -43,7 +43,7 @@ public class SuggesterTest extends SolrT
   }
   
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     if (savedFactory == null) {
       System.clearProperty("solr.directoryFactory");
     } else {
@@ -51,7 +51,7 @@ public class SuggesterTest extends SolrT
     }
   }
 
-  public static void addDocs() throws Exception {
+  public static void addDocs() {
     assertU(adoc("id", "1",
                  "text", "acceptable accidentally accommodate acquire"
                ));

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java Thu Jul 19 15:58:54 2012
@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 
+import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.ContentStreamBase;
@@ -104,6 +105,7 @@ class NewSearcherListener implements Sol
   }
 }
 
+@Slow
 public class AutoCommitTest extends AbstractSolrTestCase {
 
   @Override

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java Thu Jul 19 15:58:54 2012
@@ -18,7 +18,6 @@
 package org.apache.solr.update;
 
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -51,7 +50,7 @@ public class DirectUpdateHandlerTest ext
   }
   
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     if (savedFactory == null) {
       System.clearProperty("solr.directoryFactory");
     } else {

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java Thu Jul 19 15:58:54 2012
@@ -25,6 +25,7 @@ import java.util.concurrent.BlockingQueu
 import java.util.concurrent.LinkedBlockingQueue;
 
 import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrEventListener;
@@ -51,6 +52,7 @@ import org.junit.Before;
  *   </li>
  * </ul>
  */
+@Slow
 public class SoftAutoCommitTest extends AbstractSolrTestCase {
 
   @Override

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java Thu Jul 19 15:58:54 2012
@@ -17,6 +17,7 @@ package org.apache.solr.update;
  * limitations under the License.
  */
 
+import java.io.File;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -56,7 +57,7 @@ public class SolrCmdDistributorTest exte
   // TODO: for now we redefine this method so that it pulls from the above
   // we don't get helpful override behavior due to the method being static
   protected void createServers(int numShards) throws Exception {
-    controlJetty = createJetty(testDir, testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile());
+    controlJetty = createJetty(new File(getSolrHome()), testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile());
 
     controlClient = createNewSolrServer(controlJetty.getLocalPort());
 
@@ -64,7 +65,7 @@ public class SolrCmdDistributorTest exte
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < numShards; i++) {
       if (sb.length() > 0) sb.append(',');
-      JettySolrRunner j = createJetty(testDir,
+      JettySolrRunner j = createJetty(new File(getSolrHome()),
           testDir + "/shard" + i + "/data", null, getSolrConfigFile(),
           getSchemaFile());
       jettys.add(j);

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java Thu Jul 19 15:58:54 2012
@@ -17,6 +17,8 @@ package org.apache.solr.update;
  * limitations under the License.
  */
 
+import java.io.File;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.schema.IndexSchema;
@@ -42,10 +44,13 @@ public class SolrIndexConfigTest extends
 
   @Test
   public void testTieredMPSolrIndexConfigCreation() throws Exception {
-    SolrConfig solrConfig = new SolrConfig("solrconfig-mergepolicy.xml");
-    SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null, null);
+    SolrConfig solrConfig = new SolrConfig("solr" + File.separator
+        + "collection1", "solrconfig-mergepolicy.xml", null);
+    SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null,
+        null);
     assertNotNull(solrIndexConfig);
-    assertEquals("org.apache.lucene.index.TieredMergePolicy", solrIndexConfig.defaultMergePolicyClassName);
+    assertEquals("org.apache.lucene.index.TieredMergePolicy",
+        solrIndexConfig.defaultMergePolicyClassName);
     IndexSchema indexSchema = new IndexSchema(solrConfig, "schema.xml", null);
     solrIndexConfig.toIndexWriterConfig(indexSchema);
   }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java Thu Jul 19 15:58:54 2012
@@ -41,12 +41,12 @@ public class TestIndexingPerformance ext
   // TODO: fix this test to not require FSDirectory
   static String savedFactory;
   @BeforeClass
-  public static void beforeClass() throws Exception {
+  public static void beforeClass() {
     savedFactory = System.getProperty("solr.DirectoryFactory");
     System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockFSDirectoryFactory");
   }
   @AfterClass
-  public static void afterClass() throws Exception {
+  public static void afterClass() {
     if (savedFactory == null) {
       System.clearProperty("solr.directoryFactory");
     } else {

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestUpdate.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestUpdate.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestUpdate.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/TestUpdate.java Thu Jul 19 15:58:54 2012
@@ -150,14 +150,23 @@ public class TestUpdate extends SolrTest
     version = deleteAndGetVersion("1", null);
     afterUpdate.call();
 
+
     try {
-      // Currently, there is an implicit _version_=1 for updates (doc must exist).  This is subject to change!
-      version2 = addAndGetVersion(sdoc("id","1", "val_is",map("add",-100)), null);
+      // test that updating a non-existing doc fails if we set _version_=1
+      version2 = addAndGetVersion(sdoc("id","1", "val_is",map("add",-101), "_version_","1"), null);
       fail();
     } catch (SolrException se) {
       assertEquals(409, se.code());
     }
 
+
+    // test that by default we can update a non-existing doc
+    version = addAndGetVersion(sdoc("id","1", "val_i",102, "val_is",map("add",-102)), null);
+    afterUpdate.call();
+    assertJQ(req("qt","/get", "id","1", "fl","id,val*")
+        ,"=={'doc':{'id':'1', 'val_i':102, 'val_is':[-102]}}"
+    );
+
     version = addAndGetVersion(sdoc("id","1", "val_i",5), null);
     afterUpdate.call();
 

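For context, the reworked assertions exercise Solr's optimistic-concurrency
rules for the _version_ field. A compact sketch using the same helpers the test
already uses (sdoc/addAndGetVersion); the rule summary reflects my reading of
the semantics being tested, and the id value is hypothetical:

  // _version_ > 1 : update applies only if the doc exists with exactly that version
  // _version_ = 1 : update applies only if the doc already exists (any version)
  // _version_ < 0 : update applies only if the doc does NOT exist
  // no _version_  : plain add/upsert -- the default behavior the new test verifies
  long v = addAndGetVersion(sdoc("id","9999", "val_i",1, "_version_","-1"), null); // id 9999 must not exist
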
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java Thu Jul 19 15:58:54 2012
@@ -53,7 +53,7 @@ import org.junit.Test;
  * (mainly via TrimFieldUpdateProcessor) and the logic of other various 
  * subclasses.
  */
-public class FieldMutatingUpdateProcessorTest extends SolrTestCaseJ4 {
+public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
@@ -816,64 +816,4 @@ public class FieldMutatingUpdateProcesso
                  3.0F, d.getField("foo_s").getBoost(), 0.0F);
   }
 
-  /** 
-   * Convenience method for building up SolrInputDocuments
-   */
-  SolrInputDocument doc(SolrInputField... fields) {
-    SolrInputDocument d = new SolrInputDocument();
-    for (SolrInputField f : fields) {
-      d.put(f.getName(), f);
-    }
-    return d;
-  }
-
-  /** 
-   * Convenience method for building up SolrInputFields
-   */
-  SolrInputField field(String name, float boost, Object... values) {
-    SolrInputField f = new SolrInputField(name);
-    for (Object v : values) {
-      f.addValue(v, 1.0F);
-    }
-    f.setBoost(boost);
-    return f;
-  }
-
-  /** 
-   * Convenience method for building up SolrInputFields with default boost
-   */
-  SolrInputField f(String name, Object... values) {
-    return field(name, 1.0F, values);
-  }
-
-
-  /**
-   * Runs a document through the specified chain, and returns the final 
-   * document used when the chain is completed (NOTE: some chains may 
-   * modify the document in place
-   */
-  SolrInputDocument processAdd(final String chain, 
-                               final SolrInputDocument docIn) 
-    throws IOException {
-
-    SolrCore core = h.getCore();
-    UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain);
-    assertNotNull("No Chain named: " + chain, pc);
-
-    SolrQueryResponse rsp = new SolrQueryResponse();
-
-    SolrQueryRequest req = new LocalSolrQueryRequest
-      (core, new ModifiableSolrParams());
-    try {
-      AddUpdateCommand cmd = new AddUpdateCommand(req);
-      cmd.solrDoc = docIn;
-
-      UpdateRequestProcessor processor = pc.createProcessor(req, rsp);
-      processor.processAdd(cmd);
-
-      return cmd.solrDoc;
-    } finally {
-      req.close();
-    }
-  }
 }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/CircularListTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/CircularListTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/CircularListTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/CircularListTest.java Thu Jul 19 15:58:54 2012
@@ -19,15 +19,14 @@ package org.apache.solr.util;
 
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.logging.CircularList;
 import org.junit.Test;
 
 /** 
  * Test circular list
  */
-public class CircularListTest  extends TestCase {  
+public class CircularListTest extends LuceneTestCase {
 
   @Test
   public void testCircularList() throws IOException {

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DOMUtilTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DOMUtilTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DOMUtilTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DOMUtilTest.java Thu Jul 19 15:58:54 2012
@@ -19,7 +19,6 @@ package org.apache.solr.util;
 
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.util.DOMUtilTestBase;
 
 public class DOMUtilTest extends DOMUtilTestBase {
   
@@ -49,7 +48,7 @@ public class DOMUtilTest extends DOMUtil
     assertTypeAndValue( namedList, "Boolean", false );
   }
 
-  private void assertTypeAndValue( NamedList<Object> namedList, String key, Object value ) throws Exception {
+  private void assertTypeAndValue( NamedList<Object> namedList, String key, Object value ) {
     Object v = namedList.get( key );
     assertNotNull( v );
     assertEquals( key, v.getClass().getSimpleName() );

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java Thu Jul 19 15:58:54 2012
@@ -55,17 +55,17 @@ public class DateMathParserTest extends 
   public DateMathParserTest() {
     super();
     fmt = new SimpleDateFormat
-      ("G yyyyy MM ww WW DD dd F E aa HH hh mm ss SSS z Z",Locale.US);
+      ("G yyyyy MM ww WW DD dd F E aa HH hh mm ss SSS z Z",Locale.ROOT);
     fmt.setTimeZone(UTC);
 
-    parser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS",Locale.US);
+    parser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS",Locale.ROOT);
     parser.setTimeZone(UTC);
   }
 
   /** MACRO: Round: parses s, rounds with u, fmts */
   protected String r(String s, String u) throws Exception {
     Date d = parser.parse(s);
-    Calendar c = Calendar.getInstance(UTC, Locale.US);
+    Calendar c = Calendar.getInstance(UTC, Locale.ROOT);
     c.setTime(d);
     DateMathParser.round(c, u);
     return fmt.format(c.getTime());
@@ -74,7 +74,7 @@ public class DateMathParserTest extends 
   /** MACRO: Add: parses s, adds v u, fmts */
   protected String a(String s, int v, String u) throws Exception {
     Date d = parser.parse(s);
-    Calendar c = Calendar.getInstance(UTC, Locale.US);
+    Calendar c = Calendar.getInstance(UTC, Locale.ROOT);
     c.setTime(d);
     DateMathParser.add(c, v, u);
     return fmt.format(c.getTime());
@@ -162,7 +162,7 @@ public class DateMathParserTest extends 
   
   public void testParseStatelessness() throws Exception {
 
-    DateMathParser p = new DateMathParser(UTC, Locale.US);
+    DateMathParser p = new DateMathParser(UTC, Locale.ROOT);
     p.setNow(parser.parse("2001-07-04T12:08:56.235"));
 
     String e = fmt.format(p.parseMath(""));
@@ -179,7 +179,7 @@ public class DateMathParserTest extends 
     
   public void testParseMath() throws Exception {
 
-    DateMathParser p = new DateMathParser(UTC, Locale.US);
+    DateMathParser p = new DateMathParser(UTC, Locale.ROOT);
     p.setNow(parser.parse("2001-07-04T12:08:56.235"));
 
     // No-Op
@@ -272,7 +272,7 @@ public class DateMathParserTest extends 
     // US, Positive Offset with DST
 
     TimeZone tz = TimeZone.getTimeZone(PLUS_TZS);
-    DateMathParser p = new DateMathParser(tz, Locale.US);
+    DateMathParser p = new DateMathParser(tz, Locale.ROOT);
 
     p.setNow(parser.parse("2001-07-04T12:08:56.235"));
 
@@ -294,7 +294,7 @@ public class DateMathParserTest extends 
     // France, Negative Offset with DST
 
     tz = TimeZone.getTimeZone(NEG_TZS);
-    p = new DateMathParser(tz, Locale.US);
+    p = new DateMathParser(tz, Locale.ROOT);
     p.setNow(parser.parse("2001-07-04T12:08:56.235"));
 
     assertMath("2001-07-04T12:08:56.000", p, "/SECOND");
@@ -311,7 +311,7 @@ public class DateMathParserTest extends 
  
   public void testParseMathExceptions() throws Exception {
     
-    DateMathParser p = new DateMathParser(UTC, Locale.US);
+    DateMathParser p = new DateMathParser(UTC, Locale.ROOT);
     p.setNow(parser.parse("2001-07-04T12:08:56.235"));
     
     Map<String,Integer> badCommands = new HashMap<String,Integer>();

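The Locale.US to Locale.ROOT switches above follow the same policy as the rest
of this commit: pin every formatter to a neutral locale so test output cannot
depend on the JVM default. A small self-contained illustration, not taken from
the patch:

  import java.text.SimpleDateFormat;
  import java.util.Date;
  import java.util.Locale;

  public class LocaleDateDemo {
    public static void main(String[] args) {
      Date epoch = new Date(0); // fixed instant for a stable example
      // The same pattern yields locale-dependent day/month names...
      System.out.println(new SimpleDateFormat("EEEE d MMMM yyyy", Locale.FRENCH).format(epoch));
      // ...while Locale.ROOT gives neutral, locale-independent output.
      System.out.println(new SimpleDateFormat("EEEE d MMMM yyyy", Locale.ROOT).format(epoch));
    }
  }
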
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java Thu Jul 19 15:58:54 2012
@@ -38,8 +38,8 @@ public class TestSystemIdResolver extend
   }
   
   public void testResolving() throws Exception {
-    final String testHome = SolrTestCaseJ4.getFile("solr/conf").getParent();
-    final ResourceLoader loader = new SolrResourceLoader(testHome, this.getClass().getClassLoader());
+    final String testHome = SolrTestCaseJ4.getFile("solr/collection1").getParent();
+    final ResourceLoader loader = new SolrResourceLoader(testHome + "/collection1", this.getClass().getClassLoader());
     final SystemIdResolver resolver = new SystemIdResolver(loader);
     final String fileUri = new File(testHome+"/crazy-path-to-config.xml").toURI().toASCIIString();
     
@@ -61,8 +61,8 @@ public class TestSystemIdResolver extend
     assertEntityResolving(resolver, "solrres:/schema.xml", "solrres:/solrconfig.xml", "schema.xml");
     assertEntityResolving(resolver, "solrres:/org/apache/solr/util/TestSystemIdResolver.class",
       "solrres:/org/apache/solr/util/RTimer.class", "TestSystemIdResolver.class");
-    assertEntityResolving(resolver, SystemIdResolver.createSystemIdFromResourceName(testHome+"/conf/schema.xml"),
-      SystemIdResolver.createSystemIdFromResourceName(testHome+"/conf/solrconfig.xml"), "schema.xml");
+    assertEntityResolving(resolver, SystemIdResolver.createSystemIdFromResourceName(testHome+"/collection1/conf/schema.xml"),
+      SystemIdResolver.createSystemIdFromResourceName(testHome+"/collection1/conf/solrconfig.xml"), "schema.xml");
     assertEntityResolving(resolver, SystemIdResolver.createSystemIdFromResourceName(testHome+"/crazy-path-to-schema.xml"),
       SystemIdResolver.createSystemIdFromResourceName(testHome+"/crazy-path-to-config.xml"), "crazy-path-to-schema.xml");
     

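The testHome/collection1 path changes here, like the similar config-path edits
in the servlet tests above, track the move to a per-core solr home layout --
roughly the following tree, shown only as an illustrative sketch:

  solr/                  <-- solr home
    solr.xml
    collection1/         <-- core instance dir
      conf/
        solrconfig.xml
        schema.xml
      data/
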
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java Thu Jul 19 15:58:54 2012
@@ -120,10 +120,10 @@ public class TimeZoneUtilsTest extends L
       int hour = _TestUtil.nextInt(r, 0, 23);
       int min = _TestUtil.nextInt(r, 0, 59);
 
-      String hours = String.format(Locale.US, 
+      String hours = String.format(Locale.ROOT, 
                                    (r.nextBoolean() ? ONE_DIGIT : TWO_DIGIT),
                                    hour);
-      String mins = String.format(Locale.US, TWO_DIGIT, min);
+      String mins = String.format(Locale.ROOT, TWO_DIGIT, min);
       String input = "GMT" + (r.nextBoolean()?"+":"-") 
         + hours + (r.nextBoolean() ? "" : ((r.nextBoolean()?":":"") + mins));
       assertSameRules(input,  

Modified: lucene/dev/branches/pforcodec_3892/solr/example/README.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/README.txt?rev=1363400&r1=1363399&r2=1363400&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/README.txt (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/README.txt Thu Jul 19 15:58:54 2012
@@ -43,6 +43,8 @@ UIMA, the clustering component, or other
 you will need to copy the required jars into solr/lib or update the paths to
 the jars in your solrconfig.xml.
 
+-SolrHome
+
 By default, start.jar starts Solr in Jetty using the default solr home
 directory of "./solr/" -- To run other example configurations, you can
 specify the solr.solr.home system property when starting jetty...
@@ -50,3 +52,15 @@ speciy the solr.solr.home system propert
   java -Dsolr.solr.home=multicore -jar start.jar
   java -Dsolr.solr.home=example-DIH -jar start.jar
 
+-Logging
+
+By default, Solr will log to the console. This can be convenient when first
+getting started, but eventually you will want to log to a file. To enable
+file logging, pass a system property to Jetty on startup:
+
+  java -Djava.util.logging.config.file=etc/logging.properties -jar start.jar
+
+This will use java.util.logging to log to a file based on the config in
+etc/logging.properties. Logs will be written in the logs directory. It is
+also possible to set up log4j or another popular logging framework.
+
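The etc/logging.properties file referenced above is not shown in the patch; a
minimal, hypothetical java.util.logging config of the kind the text describes
could look like this (the key names are standard JUL, the concrete values are
illustrative and may differ from the shipped file):

  # route records to a rotating file handler
  handlers = java.util.logging.FileHandler
  .level = INFO

  # write under ./logs (the directory must already exist)
  java.util.logging.FileHandler.pattern = logs/solr-%u.log
  java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter
  java.util.logging.FileHandler.level = INFO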