Posted to commits@lucene.apache.org by mi...@apache.org on 2012/08/13 13:17:06 UTC

svn commit: r1372366 [7/8] - in /lucene/dev/branches/pforcodec_3892: ./ dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/libraries/ dev-tools/maven/ dev-tools/maven/lucene/ dev-tools/maven/lucene/analysis/common/ dev-tools/maven/lucene/analysis/icu/ ...

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java Mon Aug 13 11:16:57 2012
@@ -107,6 +107,25 @@ public abstract class ValueSourceParser 
         return new LiteralValueSource(fp.parseArg());
       }
     });
+    addParser("threadid", new ValueSourceParser() {
+      @Override
+      public ValueSource parse(FunctionQParser fp) throws ParseException {
+        return new LongConstValueSource(Thread.currentThread().getId());
+      }
+    });
+    addParser("sleep", new ValueSourceParser() {
+      @Override
+      public ValueSource parse(FunctionQParser fp) throws ParseException {
+        int ms = fp.parseInt();
+        ValueSource source = fp.parseValueSource();
+        try {
+          Thread.sleep(ms);
+        } catch (InterruptedException e) {
+          throw new RuntimeException(e);
+        }
+        return source;
+      }
+    });
     addParser("rord", new ValueSourceParser() {
       @Override
       public ValueSource parse(FunctionQParser fp) throws ParseException {
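
For context, a hedged sketch of how the two test-only functions added above might be exercised; the request values below are illustrative and not part of this commit. "sleep(ms, v)" blocks for ms milliseconds while the function query is parsed and then evaluates v; "threadid()" is a constant value source equal to the id of the thread that parsed the request.

  import org.apache.solr.client.solrj.SolrQuery;
  import org.apache.solr.client.solrj.SolrServer;
  import org.apache.solr.client.solrj.SolrServerException;
  import org.apache.solr.client.solrj.response.QueryResponse;

  // Illustrative only: exercising the new test functions via a function query (values arbitrary).
  static QueryResponse sleepQuery(SolrServer server) throws SolrServerException {
    SolrQuery q = new SolrQuery("{!func}sleep(500,threadid())"); // ~500ms parse-time sleep, then the thread id
    q.set("fl", "id,score");
    return server.query(q);   // "server" is any SolrServer pointed at a test core
  }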

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java Mon Aug 13 11:16:57 2012
@@ -110,9 +110,9 @@ public class SearchGroupsResultTransform
           if (sortValue instanceof BytesRef) {
             UnicodeUtil.UTF8toUTF16((BytesRef)sortValue, spare);
             String indexedValue = spare.toString();
-            sortValue = (Comparable) fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 0.0f));
+            sortValue = (Comparable) fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 1.0f));
           } else if (sortValue instanceof String) {
-            sortValue = (Comparable) fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 0.0f));
+            sortValue = (Comparable) fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 1.0f));
           }
         }
         convertedSortValues[i] = sortValue;

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java Mon Aug 13 11:16:57 2012
@@ -199,9 +199,9 @@ public class TopGroupsResultTransformer 
             if (sortValue instanceof BytesRef) {
               UnicodeUtil.UTF8toUTF16((BytesRef)sortValue, spare);
               String indexedValue = spare.toString();
-              sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 0.0f));
+              sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 1.0f));
             } else if (sortValue instanceof String) {
-              sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 0.0f));
+              sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 1.0f));
             }
           }
           convertedSortValues[j] = sortValue;
@@ -252,9 +252,9 @@ public class TopGroupsResultTransformer 
           if (sortValue instanceof BytesRef) {
             UnicodeUtil.UTF8toUTF16((BytesRef)sortValue, spare);
             String indexedValue = spare.toString();
-            sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 0.0f));
+            sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 1.0f));
           } else if (sortValue instanceof String) {
-            sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 0.0f));
+            sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 1.0f));
           }
         }
         convertedSortValues[j] = sortValue;

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java Mon Aug 13 11:16:57 2012
@@ -69,7 +69,7 @@ public class GroupedEndResultTransformer
           SimpleOrderedMap<Object> groupResult = new SimpleOrderedMap<Object>();
           if (group.groupValue != null) {
             groupResult.add(
-                "groupValue", groupFieldType.toObject(groupField.createField(group.groupValue.utf8ToString(), 0.0f))
+                "groupValue", groupFieldType.toObject(groupField.createField(group.groupValue.utf8ToString(), 1.0f))
             );
           } else {
             groupResult.add("groupValue", null);

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/CommitTracker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/CommitTracker.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/CommitTracker.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/CommitTracker.java Mon Aug 13 11:16:57 2012
@@ -29,6 +29,7 @@ import org.apache.solr.common.params.Mod
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,8 +53,8 @@ public final class CommitTracker impleme
   private int docsUpperBound;
   private long timeUpperBound;
   
-  private final ScheduledExecutorService scheduler = Executors
-      .newScheduledThreadPool(1);
+  private final ScheduledExecutorService scheduler = 
+      Executors.newScheduledThreadPool(1, new DefaultSolrThreadFactory("commitScheduler"));
   private ScheduledFuture pending;
   
   // state
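
The only functional change above is that the commit scheduler's threads now come from a naming thread factory, so they carry a recognizable name in thread dumps. A minimal sketch of what such a factory typically looks like; the real org.apache.solr.util.DefaultSolrThreadFactory differs in detail.

  import java.util.concurrent.ThreadFactory;
  import java.util.concurrent.atomic.AtomicInteger;

  // Sketch only: a ThreadFactory that names its threads with a fixed prefix.
  class NamedThreadFactory implements ThreadFactory {
    private final String prefix;
    private final AtomicInteger counter = new AtomicInteger();

    NamedThreadFactory(String prefix) { this.prefix = prefix; }

    @Override
    public Thread newThread(Runnable r) {
      Thread t = new Thread(r, prefix + "-" + counter.incrementAndGet() + "-thread");
      t.setDaemon(true);  // assumption: background scheduler threads should not keep the JVM alive
      return t;
    }
  }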

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java Mon Aug 13 11:16:57 2012
@@ -198,8 +198,9 @@ public final class DefaultSolrCoreState 
       return;
     }
     
-    cancelRecovery();
     synchronized (recoveryLock) {
+      cancelRecovery();
+      
       while (recoveryRunning) {
         try {
           recoveryLock.wait(1000);

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/TransactionLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/TransactionLog.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/TransactionLog.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/TransactionLog.java Mon Aug 13 11:16:57 2012
@@ -34,9 +34,11 @@ import java.nio.ByteBuffer;
 import java.nio.channels.Channels;
 import java.nio.channels.FileChannel;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -149,7 +151,8 @@ public class TransactionLog {
       long start = raf.length();
       channel = raf.getChannel();
       os = Channels.newOutputStream(channel);
-      fos = FastOutputStream.wrap(os);
+      fos = new FastOutputStream(os, new byte[65536], 0);
+      // fos = FastOutputStream.wrap(os);
 
       if (openExisting) {
         if (start > 0) {
@@ -300,93 +303,119 @@ public class TransactionLog {
     numRecords++;
   }
 
+  private void checkWriteHeader(LogCodec codec, SolrInputDocument optional) throws IOException {
+
+    // Unsynchronized access.  We can get away with an unsynchronized access here
+    // since we will never get a false non-zero when the position is in fact 0.
+    // rollback() is the only function that can reset to zero, and it blocks updates.
+    if (fos.size() != 0) return;
+
+    synchronized (this) {
+      if (fos.size() != 0) return;  // check again while synchronized
+      if (optional != null) {
+        addGlobalStrings(optional.getFieldNames());
+      }
+      writeLogHeader(codec);
+    }
+  }
+
+  int lastAddSize;
 
   public long write(AddUpdateCommand cmd, int flags) {
     LogCodec codec = new LogCodec();
-    long pos = 0;
-    synchronized (this) {
-      try {
-        pos = fos.size();   // if we had flushed, this should be equal to channel.position()
-        SolrInputDocument sdoc = cmd.getSolrInputDocument();
+    SolrInputDocument sdoc = cmd.getSolrInputDocument();
 
-        if (pos == 0) { // TODO: needs to be changed if we start writing a header first
-          addGlobalStrings(sdoc.getFieldNames());
-          writeLogHeader(codec);
-          pos = fos.size();
-        }
+    try {
+      checkWriteHeader(codec, sdoc);
+
+      // adaptive buffer sizing
+      int bufSize = lastAddSize;    // unsynchronized access of lastAddSize should be fine
+      bufSize = Math.min(1024*1024, bufSize+(bufSize>>3)+256);
+
+      MemOutputStream out = new MemOutputStream(new byte[bufSize]);
+      codec.init(out);
+      codec.writeTag(JavaBinCodec.ARR, 3);
+      codec.writeInt(UpdateLog.ADD | flags);  // should just take one byte
+      codec.writeLong(cmd.getVersion());
+      codec.writeSolrInputDocument(cmd.getSolrInputDocument());
+      lastAddSize = (int)out.size();
+
+      synchronized (this) {
+        long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
+        assert pos != 0;
 
         /***
-        System.out.println("###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length());
+         System.out.println("###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length());
          if (pos != fos.size()) {
-          throw new RuntimeException("ERROR" + "###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length());
-        }
+         throw new RuntimeException("ERROR" + "###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length());
+         }
          ***/
 
-        codec.init(fos);
-        codec.writeTag(JavaBinCodec.ARR, 3);
-        codec.writeInt(UpdateLog.ADD | flags);  // should just take one byte
-        codec.writeLong(cmd.getVersion());
-        codec.writeSolrInputDocument(cmd.getSolrInputDocument());
-
+        out.writeAll(fos);
         endRecord(pos);
         // fos.flushBuffer();  // flush later
         return pos;
-      } catch (IOException e) {
-        // TODO: reset our file pointer back to "pos", the start of this record.
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error logging add", e);
       }
+
+    } catch (IOException e) {
+      // TODO: reset our file pointer back to "pos", the start of this record.
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error logging add", e);
     }
   }
 
   public long writeDelete(DeleteUpdateCommand cmd, int flags) {
     LogCodec codec = new LogCodec();
-    synchronized (this) {
-      try {
-        long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
-        if (pos == 0) {
-          writeLogHeader(codec);
-          pos = fos.size();
-        }
-        codec.init(fos);
-        codec.writeTag(JavaBinCodec.ARR, 3);
-        codec.writeInt(UpdateLog.DELETE | flags);  // should just take one byte
-        codec.writeLong(cmd.getVersion());
-        BytesRef br = cmd.getIndexedId();
-        codec.writeByteArray(br.bytes, br.offset, br.length);
 
+    try {
+      checkWriteHeader(codec, null);
+
+      BytesRef br = cmd.getIndexedId();
+
+      MemOutputStream out = new MemOutputStream(new byte[20 + br.length]);
+      codec.init(out);
+      codec.writeTag(JavaBinCodec.ARR, 3);
+      codec.writeInt(UpdateLog.DELETE | flags);  // should just take one byte
+      codec.writeLong(cmd.getVersion());
+      codec.writeByteArray(br.bytes, br.offset, br.length);
+
+      synchronized (this) {
+        long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
+        assert pos != 0;
+        out.writeAll(fos);
         endRecord(pos);
         // fos.flushBuffer();  // flush later
-
         return pos;
-      } catch (IOException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
+
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
+
   }
 
   public long writeDeleteByQuery(DeleteUpdateCommand cmd, int flags) {
     LogCodec codec = new LogCodec();
-    synchronized (this) {
-      try {
-        long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
-        if (pos == 0) {
-          writeLogHeader(codec);
-          pos = fos.size();
-        }
-        codec.init(fos);
-        codec.writeTag(JavaBinCodec.ARR, 3);
-        codec.writeInt(UpdateLog.DELETE_BY_QUERY | flags);  // should just take one byte
-        codec.writeLong(cmd.getVersion());
-        codec.writeStr(cmd.query);
+    try {
+      checkWriteHeader(codec, null);
 
+      MemOutputStream out = new MemOutputStream(new byte[20 + (cmd.query.length())]);
+      codec.init(out);
+      codec.writeTag(JavaBinCodec.ARR, 3);
+      codec.writeInt(UpdateLog.DELETE_BY_QUERY | flags);  // should just take one byte
+      codec.writeLong(cmd.getVersion());
+      codec.writeStr(cmd.query);
+
+      synchronized (this) {
+        long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
+        out.writeAll(fos);
         endRecord(pos);
         // fos.flushBuffer();  // flush later
-
         return pos;
+      }
       } catch (IOException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
-    }
+
   }
 
 
@@ -745,3 +774,32 @@ class ChannelFastInputStream extends Fas
   }
 }
 
+
+class MemOutputStream extends FastOutputStream {
+  public List<byte[]> buffers = new LinkedList<byte[]>();
+  public MemOutputStream(byte[] tempBuffer) {
+    super(null, tempBuffer, 0);
+  }
+
+  @Override
+  public void flush(byte[] arr, int offset, int len) throws IOException {
+    if (arr == buf && offset==0 && len==buf.length) {
+      buffers.add(buf);  // steal the buffer
+      buf = new byte[8192];
+    } else if (len > 0) {
+      byte[] newBuf = new byte[len];
+      System.arraycopy(arr, offset, newBuf, 0, len);
+      buffers.add(newBuf);
+    }
+  }
+
+  public void writeAll(FastOutputStream fos) throws IOException {
+    for (byte[] buffer : buffers) {
+      fos.write(buffer);
+    }
+    if (pos > 0) {
+      fos.write(buf, 0, pos);
+    }
+  }
+}
+
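
The rewrite above applies one pattern across write/writeDelete/writeDeleteByQuery: serialize each record into a private in-memory buffer with no lock held, write the log header at most once via a double-checked test in checkWriteHeader, and hold the TransactionLog monitor only for the final copy into the shared output stream. A condensed, self-contained sketch of that pattern with illustrative names (not the real TransactionLog API):

  import java.io.ByteArrayOutputStream;
  import java.io.IOException;
  import java.io.OutputStream;

  // Sketch: "encode outside the lock, append inside the lock".
  class RecordLog {
    private final OutputStream out;          // shared, append-only log stream
    private long size;                       // bytes appended so far (guarded by "this")
    private volatile boolean headerWritten;  // double-checked, like checkWriteHeader() above

    RecordLog(OutputStream out) { this.out = out; }

    long append(byte[] record) throws IOException {
      // 1. Encode into a private buffer; no shared state is touched here.
      ByteArrayOutputStream buf = new ByteArrayOutputStream(record.length + 16);
      buf.write(record);

      // 2. Write the header exactly once.
      if (!headerWritten) {
        synchronized (this) {
          if (!headerWritten) {
            byte[] header = {'T', 'L', 'O', 'G'};
            out.write(header);
            size += header.length;
            headerWritten = true;
          }
        }
      }

      // 3. Only the cheap copy into the shared stream runs under the monitor.
      synchronized (this) {
        long pos = size;
        buf.writeTo(out);
        size += buf.size();
        return pos;            // start offset of this record in the log
      }
    }
  }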

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/UpdateLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/UpdateLog.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/UpdateLog.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/update/UpdateLog.java Mon Aug 13 11:16:57 2012
@@ -1048,6 +1048,7 @@ public class UpdateLog implements Plugin
     try {
       cancelApplyBufferUpdate = false;
       if (state != State.BUFFERING) return null;
+      operationFlags &= ~FLAG_GAP;
 
       // handle case when no log was even created because no updates
       // were received.
@@ -1057,7 +1058,6 @@ public class UpdateLog implements Plugin
       }
       tlog.incref();
       state = State.APPLYING_BUFFERED;
-      operationFlags &= ~FLAG_GAP;
     } finally {
       versionInfo.unblockUpdates();
     }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/util/FastWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/util/FastWriter.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/util/FastWriter.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/util/FastWriter.java Mon Aug 13 11:16:57 2012
@@ -52,7 +52,7 @@ public class FastWriter extends Writer {
 
   public void write(char c) throws IOException {
     if (pos >= buf.length) {
-      sink.write(buf,0,pos);
+      flush(buf,0,pos);
       pos=0;
     }
     buf[pos++] = c;
@@ -61,7 +61,7 @@ public class FastWriter extends Writer {
   @Override
   public FastWriter append(char c) throws IOException {
     if (pos >= buf.length) {
-      sink.write(buf,0,pos);
+      flush(buf,0,pos);
       pos=0;
     }
     buf[pos++] = c;
@@ -77,14 +77,14 @@ public class FastWriter extends Writer {
     } else if (len<BUFSIZE) {
       // if the data to write is small enough, buffer it.
       System.arraycopy(cbuf, off, buf, pos, space);
-      sink.write(buf, 0, buf.length);
+      flush(buf, 0, buf.length);
       pos = len-space;
       System.arraycopy(cbuf, off+space, buf, 0, pos);
     } else {
-      sink.write(buf,0,pos);  // flush
+      flush(buf,0,pos);  // flush
       pos=0;
       // don't buffer, just write to sink
-      sink.write(cbuf, off, len);
+      flush(cbuf, off, len);
     }
   }
 
@@ -97,32 +97,40 @@ public class FastWriter extends Writer {
     } else if (len<BUFSIZE) {
       // if the data to write is small enough, buffer it.
       str.getChars(off, off+space, buf, pos);
-      sink.write(buf, 0, buf.length);
+      flush(buf, 0, buf.length);
       str.getChars(off+space, off+len, buf, 0);
       pos = len-space;
     } else {
-      sink.write(buf,0,pos);  // flush
+      flush(buf,0,pos);  // flush
       pos=0;
       // don't buffer, just write to sink
-      sink.write(str, off, len);
+      flush(str, off, len);
     }
   }
 
   @Override
   public void flush() throws IOException {
-    sink.write(buf,0,pos);
+    flush(buf, 0, pos);
     pos=0;
-    sink.flush();
+    if (sink != null) sink.flush();
+  }
+
+  public void flush(char[] buf, int offset, int len) throws IOException {
+    sink.write(buf, offset, len);
+  }
+
+  public void flush(String str, int offset, int len) throws IOException {
+    sink.write(str, offset, len);
   }
 
   @Override
   public void close() throws IOException {
     flush();
-    sink.close();
+    if (sink != null) sink.close();
   }
 
   public void flushBuffer() throws IOException {
-    sink.write(buf, 0, pos);
+    flush(buf, 0, pos);
     pos=0;
   }
 }
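
After this change every character leaving FastWriter goes through the two new overridable flush(char[],int,int) / flush(String,int,int) hooks, and a null sink is tolerated, so a subclass can observe or redirect the output. A hedged sketch of such a subclass; it assumes the existing FastWriter(Writer) constructor, and the subclass itself is hypothetical:

  import java.io.IOException;
  import java.io.Writer;
  import org.apache.solr.util.FastWriter;

  // Hypothetical subclass: counts everything flushed to the sink without copying it.
  class CountingFastWriter extends FastWriter {
    long charsFlushed;

    CountingFastWriter(Writer sink) { super(sink); }

    @Override
    public void flush(char[] cbuf, int offset, int len) throws IOException {
      charsFlushed += len;
      super.flush(cbuf, offset, len);   // still forwards to the wrapped sink
    }

    @Override
    public void flush(String str, int offset, int len) throws IOException {
      charsFlushed += len;
      super.flush(str, offset, len);
    }
  }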

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test-files/solr/collection1/conf/schema.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test-files/solr/collection1/conf/schema.xml?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test-files/solr/collection1/conf/schema.xml (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test-files/solr/collection1/conf/schema.xml Mon Aug 13 11:16:57 2012
@@ -559,7 +559,7 @@
    <field name="inStock" type="boolean" indexed="true" stored="true" />
 
    <field name="subword" type="subword" indexed="true" stored="true"/>
-   <field name="subword_offsets" type="subword" indexed="true" stored="true" termOffsets="true"/>
+   <field name="subword_offsets" type="subword" indexed="true" stored="true" termVectors="true" termOffsets="true"/>
    <field name="numericsubword" type="numericsubword" indexed="true" stored="true"/>
    <field name="protectedsubword" type="protectedsubword" indexed="true" stored="true"/>
 

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java Mon Aug 13 11:16:57 2012
@@ -25,6 +25,7 @@ import org.apache.solr.highlight.Default
 import org.apache.solr.search.LRUCache;
 import org.junit.BeforeClass;
 import java.io.File;
+import java.net.URI;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.Enumeration;
@@ -94,7 +95,10 @@ public class SolrInfoMBeanTest extends S
     String path = pckgname.replace('.', '/');
     Enumeration<URL> resources = cld.getResources(path);
     while (resources.hasMoreElements()) {
-      final File f = new File(resources.nextElement().toURI());
+      final URI uri = resources.nextElement().toURI();
+      if (!"file".equalsIgnoreCase(uri.getScheme()))
+        continue;
+      final File f = new File(uri);
       directories.add(f);
     }
       
@@ -114,6 +118,7 @@ public class SolrInfoMBeanTest extends S
         }
       }
     }
+    assertFalse("No classes found in package '"+pckgname+"'; maybe your test classes are packaged as JAR file?", classes.isEmpty());
     return classes;
   }
 }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/TestDistributedSearch.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/TestDistributedSearch.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/TestDistributedSearch.java Mon Aug 13 11:16:57 2012
@@ -310,7 +310,8 @@ public class TestDistributedSearch exten
     unIgnoreException("isShard is only acceptable");
 
     // test debugging
-    handle.put("explain", UNORDERED);
+    // handle.put("explain", UNORDERED);
+    handle.put("explain", SKIPVAL);  // internal docids differ, idf differs w/o global idf
     handle.put("debug", UNORDERED);
     handle.put("time", SKIPVAL);
     query("q","now their fox sat had put","fl","*,score",CommonParams.DEBUG_QUERY, "true");

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java Mon Aug 13 11:16:57 2012
@@ -71,10 +71,11 @@ import org.apache.solr.update.SolrCmdDis
 import org.apache.solr.util.DefaultSolrThreadFactory;
 
 /**
- *
+ * This test simply does a bunch of basic things in solrcloud mode and asserts things
+ * work as expected.
  */
 @Slow
-public class BasicDistributedZkTest extends AbstractDistributedZkTestCase {
+public class BasicDistributedZkTest extends AbstractDistribZkTestBase {
   
   private static final String DEFAULT_COLLECTION = "collection1";
   private static final boolean DEBUG = false;
@@ -281,7 +282,7 @@ public class BasicDistributedZkTest exte
     }
 
     // test debugging
-    handle.put("explain", UNORDERED);
+    handle.put("explain", SKIPVAL);
     handle.put("debug", UNORDERED);
     handle.put("time", SKIPVAL);
     query("q","now their fox sat had put","fl","*,score",CommonParams.DEBUG_QUERY, "true");

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java Mon Aug 13 11:16:57 2012
@@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
 
 @Slow
 @Ignore("ignore while investigating jenkins fails")
-public class ChaosMonkeyNothingIsSafeTest extends FullSolrCloudTest {
+public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase {
   public static Logger log = LoggerFactory.getLogger(ChaosMonkeyNothingIsSafeTest.class);
   
   private static final int BASE_RUN_LENGTH = 180000;

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java Mon Aug 13 11:16:57 2012
@@ -32,7 +32,7 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 
 @Ignore("SOLR-3126")
-public class ChaosMonkeySafeLeaderTest extends FullSolrCloudTest {
+public class ChaosMonkeySafeLeaderTest extends AbstractFullDistribZkTestBase {
   
   private static final int BASE_RUN_LENGTH = 120000;
 

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java Mon Aug 13 11:16:57 2012
@@ -55,7 +55,7 @@ public class ClusterStateTest extends So
     ClusterState clusterState = new ClusterState(liveNodes, collectionStates);
     byte[] bytes = ZkStateReader.toJSON(clusterState);
     
-    ClusterState loadedClusterState = ClusterState.load(bytes, liveNodes);
+    ClusterState loadedClusterState = ClusterState.load(null, bytes, liveNodes);
     
     assertEquals("Provided liveNodes not used properly", 2, loadedClusterState
         .getLiveNodes().size());
@@ -63,13 +63,13 @@ public class ClusterStateTest extends So
     assertEquals("Poperties not copied properly", zkNodeProps.get("prop1"), loadedClusterState.getSlice("collection1", "shard1").getShards().get("node1").get("prop1"));
     assertEquals("Poperties not copied properly", zkNodeProps.get("prop2"), loadedClusterState.getSlice("collection1", "shard1").getShards().get("node1").get("prop2"));
 
-    loadedClusterState = ClusterState.load(new byte[0], liveNodes);
+    loadedClusterState = ClusterState.load(null, new byte[0], liveNodes);
     
     assertEquals("Provided liveNodes not used properly", 2, loadedClusterState
         .getLiveNodes().size());
     assertEquals("Should not have collections", 0, loadedClusterState.getCollections().size());
 
-    loadedClusterState = ClusterState.load((byte[])null, liveNodes);
+    loadedClusterState = ClusterState.load(null, (byte[])null, liveNodes);
     
     assertEquals("Provided liveNodes not used properly", 2, loadedClusterState
         .getLiveNodes().size());

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java Mon Aug 13 11:16:57 2012
@@ -23,8 +23,8 @@ import org.apache.lucene.util.LuceneTest
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
+import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
@@ -37,14 +37,13 @@ import org.apache.solr.common.params.Mod
 import org.apache.solr.update.VersionInfo;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
 import org.junit.BeforeClass;
 
 /**
  * Super basic testing, no shard restarting or anything.
  */
 @Slow
-public class FullSolrCloudDistribCmdsTest extends FullSolrCloudTest {
+public class FullSolrCloudDistribCmdsTest extends AbstractFullDistribZkTestBase {
   
   
   @BeforeClass

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java Mon Aug 13 11:16:57 2012
@@ -35,10 +35,12 @@ import org.apache.solr.common.cloud.Solr
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 
 @Slow
@@ -315,7 +317,7 @@ public class LeaderElectionTest extends 
   @Test
   public void testStressElection() throws Exception {
     final ScheduledExecutorService scheduler = Executors
-        .newScheduledThreadPool(15);
+        .newScheduledThreadPool(15, new DefaultSolrThreadFactory("stressElection"));
     final List<ClientThread> threads = Collections
         .synchronizedList(new ArrayList<ClientThread>());
     
@@ -369,9 +371,7 @@ public class LeaderElectionTest extends 
             }
 
             Thread.sleep(10);
-            
           } catch (Exception e) {
-
           }
         }
       }
@@ -382,7 +382,6 @@ public class LeaderElectionTest extends 
       public void run() {
         
         while (!stopStress) {
-
           try {
             Thread.sleep(50);
             int j;
@@ -426,6 +425,7 @@ public class LeaderElectionTest extends 
     
     // cleanup any threads still running
     for (ClientThread thread : threads) {
+      thread.zkClient.getSolrZooKeeper().close();
       thread.close();
     }
     

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java Mon Aug 13 11:16:57 2012
@@ -43,6 +43,7 @@ import org.apache.solr.common.cloud.ZkNo
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
+import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NodeExistsException;
@@ -298,7 +299,7 @@ public class OverseerTest extends SolrTe
 
       
       for (int i = 0; i < nodeCount; i++) {
-        nodeExecutors[i] = Executors.newFixedThreadPool(1);
+        nodeExecutors[i] = Executors.newFixedThreadPool(1, new DefaultSolrThreadFactory("testShardAssignment"));
       }
       
       final String[] ids = new String[coreCount];

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java Mon Aug 13 11:16:57 2012
@@ -24,29 +24,17 @@ import org.apache.solr.client.solrj.Solr
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.common.SolrInputDocument;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Slow
-public class RecoveryZkTest extends FullSolrCloudTest {
+public class RecoveryZkTest extends AbstractFullDistribZkTestBase {
 
   //private static final String DISTRIB_UPDATE_CHAIN = "distrib-update-chain";
   private static Logger log = LoggerFactory.getLogger(RecoveryZkTest.class);
   private StopableIndexingThread indexThread;
   private StopableIndexingThread indexThread2;
-  @BeforeClass
-  public static void beforeSuperClass() {
 
-  }
-  
-  @AfterClass
-  public static void afterSuperClass() {
-
-  }
-  
   public RecoveryZkTest() {
     super();
     sliceCount = 1;

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java Mon Aug 13 11:16:57 2012
@@ -44,7 +44,7 @@ import org.junit.BeforeClass;
  * elected.
  */
 @Slow
-public class SyncSliceTest extends FullSolrCloudTest {
+public class SyncSliceTest extends AbstractFullDistribZkTestBase {
   
   @BeforeClass
   public static void beforeSuperClass() {
@@ -86,7 +86,7 @@ public class SyncSliceTest extends FullS
     handle.put("QTime", SKIPVAL);
     handle.put("timestamp", SKIPVAL);
     
-    waitForThingsToLevelOut();
+    waitForThingsToLevelOut(15);
 
     del("*:*");
     List<String> skipServers = new ArrayList<String>();
@@ -129,7 +129,7 @@ public class SyncSliceTest extends FullS
     HttpSolrServer baseServer = new HttpSolrServer(baseUrl);
     baseServer.request(request);
     
-    waitForThingsToLevelOut();
+    waitForThingsToLevelOut(15);
     
     checkShardConsistency(false, true);
     
@@ -159,7 +159,7 @@ public class SyncSliceTest extends FullS
     // to talk to a downed node causes grief
     waitToSeeDownInClusterState(leaderJetty, jetties);
 
-    waitForThingsToLevelOut();
+    waitForThingsToLevelOut(15);
     
     checkShardConsistency(false, true);
     
@@ -180,7 +180,7 @@ public class SyncSliceTest extends FullS
     // give a moment to be sure it has started recovering
     Thread.sleep(2000);
     
-    waitForThingsToLevelOut();
+    waitForThingsToLevelOut(15);
     waitForRecoveriesToFinish(false);
     
     skipServers = getRandomOtherJetty(leaderJetty, null);
@@ -224,6 +224,7 @@ public class SyncSliceTest extends FullS
     waitForRecoveriesToFinish(false);
 
     checkShardConsistency(true, true);
+    
   }
 
   private List<String> getRandomJetty() {
@@ -258,34 +259,6 @@ public class SyncSliceTest extends FullS
     }
     waitToSeeNotLive(cloudClient.getZkStateReader(), leaderJetty);
   }
-
-  private void waitForThingsToLevelOut() throws Exception {
-    int cnt = 0;
-    boolean retry = false;
-    do {
-      waitForRecoveriesToFinish(false);
-      
-      commit();
-      
-      updateMappingsFromZk(jettys, clients);
-      
-      Set<String> theShards = shardToJetty.keySet();
-      String failMessage = null;
-      for (String shard : theShards) {
-        failMessage = checkShardConsistency(shard, false);
-      }
-      
-      if (failMessage != null) {
-        retry = true;
-      } else {
-        retry = false;
-      }
-      
-      cnt++;
-      if (cnt > 10) break;
-      Thread.sleep(2000);
-    } while (retry);
-  }
   
   protected void indexDoc(List<String> skipServers, Object... fields) throws IOException,
       SolrServerException {

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/TestMultiCoreConfBootstrap.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/TestMultiCoreConfBootstrap.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/TestMultiCoreConfBootstrap.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/cloud/TestMultiCoreConfBootstrap.java Mon Aug 13 11:16:57 2012
@@ -37,7 +37,6 @@ public class TestMultiCoreConfBootstrap 
   protected CoreContainer cores = null;
   private String home;
 
-
   protected static ZkTestServer zkServer;
   protected static String zkDir;
   
@@ -101,7 +100,6 @@ public class TestMultiCoreConfBootstrap 
     super.tearDown();
   }
 
-
   @Test
   public void testMultiCoreConfBootstrap() throws Exception {
     System.setProperty("bootstrap_conf", "true");
@@ -113,6 +111,7 @@ public class TestMultiCoreConfBootstrap 
     assertTrue(zkclient.exists("/configs/core1/schema.xml", true));
     assertTrue(zkclient.exists("/configs/core0/solrconfig.xml", true));
     assertTrue(zkclient.exists("/configs/core1/schema.xml", true));
+    
+    zkclient.close();
   }
-
 }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java Mon Aug 13 11:16:57 2012
@@ -25,6 +25,7 @@ import org.apache.solr.request.SolrQuery
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.solr.util.plugin.SolrCoreAware;
 import org.junit.Test;
 
@@ -163,7 +164,7 @@ public class SolrCoreTest extends SolrTe
 
     final int LOOP = 100;
     final int MT = 16;
-    ExecutorService service = Executors.newFixedThreadPool(MT);
+    ExecutorService service = Executors.newFixedThreadPool(MT, new DefaultSolrThreadFactory("refCountMT"));
     List<Callable<Integer>> callees = new ArrayList<Callable<Integer>>(MT);
     final CoreContainer cores = h.getCoreContainer();
     for (int i = 0; i < MT; ++i) {

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java Mon Aug 13 11:16:57 2012
@@ -121,7 +121,7 @@ public class TermVectorComponentTest ext
   @Test
   public void testBasics() throws Exception {
     assertJQ(req("json.nl","map", "qt",tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true", TermVectorParams.TF, "true")
-       ,"/termVectors=={'doc-0':{'uniqueKey':'0'," +
+       ,"/termVectors=={'0':{'uniqueKey':'0'," +
             " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
@@ -136,7 +136,7 @@ public class TermVectorComponentTest ext
                  "tv.fl", "test_basictv,test_offtv",
                  TermVectorComponent.COMPONENT_NAME, "true", 
                  TermVectorParams.TF, "true")
-       ,"/termVectors=={'doc-0':{'uniqueKey':'0'," +
+       ,"/termVectors=={'0':{'uniqueKey':'0'," +
             " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}}," +
             " 'uniqueKeyFieldName':'id'}"
@@ -150,7 +150,7 @@ public class TermVectorComponentTest ext
                  "tv.fl","test_offtv",
                  TermVectorComponent.COMPONENT_NAME, "true", 
                  TermVectorParams.TF, "true")
-       ,"/termVectors=={'doc-0':{'uniqueKey':'0'," +
+       ,"/termVectors=={'0':{'uniqueKey':'0'," +
             " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}}," +
             " 'uniqueKeyFieldName':'id'}"
@@ -162,7 +162,7 @@ public class TermVectorComponentTest ext
                  "fl", "*,score",
                  TermVectorComponent.COMPONENT_NAME, "true", 
                  TermVectorParams.TF, "true")
-       ,"/termVectors=={'doc-0':{'uniqueKey':'0'," +
+       ,"/termVectors=={'0':{'uniqueKey':'0'," +
             " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
@@ -176,7 +176,7 @@ public class TermVectorComponentTest ext
                  "fl", "score,test_basictv,[docid],test_postv,val:sum(3,4)",
                  TermVectorComponent.COMPONENT_NAME, "true", 
                  TermVectorParams.TF, "true")
-       ,"/termVectors=={'doc-0':{'uniqueKey':'0'," +
+       ,"/termVectors=={'0':{'uniqueKey':'0'," +
             " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}," +
             " 'uniqueKeyFieldName':'id'}"
@@ -189,7 +189,7 @@ public class TermVectorComponentTest ext
                  "fl", "[docid],test_postv,val:sum(3,4)",
                  TermVectorComponent.COMPONENT_NAME, "true", 
                  TermVectorParams.TF, "true")
-       ,"/termVectors=={'doc-0':{'uniqueKey':'0'," +
+       ,"/termVectors=={'0':{'uniqueKey':'0'," +
             " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
             " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}," +
             " 'uniqueKeyFieldName':'id'}"
@@ -201,12 +201,12 @@ public class TermVectorComponentTest ext
   public void testOptions() throws Exception {
     assertJQ(req("json.nl","map", "qt",tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true"
        , TermVectorParams.TF, "true", TermVectorParams.DF, "true", TermVectorParams.OFFSETS, "true", TermVectorParams.POSITIONS, "true", TermVectorParams.TF_IDF, "true")
-       ,"/termVectors/doc-0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':1}, 'df':2, 'tf-idf':0.5}"
+       ,"/termVectors/0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':1}, 'df':2, 'tf-idf':0.5}"
     );
     
     assertJQ(req("json.nl","map", "qt",tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true"
         , TermVectorParams.ALL, "true")
-        ,"/termVectors/doc-0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':1}, 'df':2, 'tf-idf':0.5}"
+        ,"/termVectors/0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':1}, 'df':2, 'tf-idf':0.5}"
      );
     
     // test each combination at random
@@ -217,7 +217,7 @@ public class TermVectorComponentTest ext
         { TermVectorParams.POSITIONS, "'positions':{'position':1}" },
         { TermVectorParams.DF, "'df':2" },
         { TermVectorParams.TF_IDF, "'tf-idf':0.5" } };
-    StringBuilder expected = new StringBuilder("/termVectors/doc-0/test_posofftv/anoth=={");
+    StringBuilder expected = new StringBuilder("/termVectors/0/test_posofftv/anoth=={");
     boolean first = true;
     for (int i = 0; i < options.length; i++) {
       final boolean use = random().nextBoolean();
@@ -248,59 +248,13 @@ public class TermVectorComponentTest ext
         ,"f.test_basictv." + TermVectorParams.TF, "false"
         ,"f.test_basictv." + TermVectorParams.TF_IDF, "false"
         )
-    ,"/termVectors/doc-0/test_basictv=={'anoth':{},'titl':{}}"
-    ,"/termVectors/doc-0/test_postv/anoth=={'tf':1, 'positions':{'position':1}, 'df':2, 'tf-idf':0.5}"
-    ,"/termVectors/doc-0/test_offtv/anoth=={'tf':1, 'df':2, 'tf-idf':0.5}"
+    ,"/termVectors/0/test_basictv=={'anoth':{},'titl':{}}"
+    ,"/termVectors/0/test_postv/anoth=={'tf':1, 'positions':{'position':1}, 'df':2, 'tf-idf':0.5}"
+    ,"/termVectors/0/test_offtv/anoth=={'tf':1, 'df':2, 'tf-idf':0.5}"
     ,"/termVectors/warnings=={ 'noTermVectors':['test_notv'], 'noPositions':['test_basictv', 'test_offtv'], 'noOffsets':['test_basictv', 'test_postv']}"
     );
   }
 
-
-  // TODO: this test is really fragile since it pokes around in solr's guts and makes many assumptions.
-  // it should be rewritten to use the real distributed interface
-  @Test
-  public void testDistributed() throws Exception {
-    SolrCore core = h.getCore();
-    TermVectorComponent tvComp = (TermVectorComponent) core.getSearchComponent("tvComponent");
-    assertTrue("tvComp is null and it shouldn't be", tvComp != null);
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.add(CommonParams.Q, "id:0");
-    params.add(CommonParams.QT, "tvrh");
-    params.add(TermVectorParams.TF, "true");
-    params.add(TermVectorParams.DF, "true");
-    params.add(TermVectorParams.OFFSETS, "true");
-    params.add(TermVectorParams.POSITIONS, "true");
-    params.add(TermVectorComponent.COMPONENT_NAME, "true");
-
-    ResponseBuilder rb = new ResponseBuilder(new LocalSolrQueryRequest(core, params), new SolrQueryResponse(), (List)Arrays.asList(tvComp));
-    rb.stage = ResponseBuilder.STAGE_GET_FIELDS;
-    rb.shards = new String[]{"localhost:0", "localhost:1", "localhost:2", "localhost:3"};//we don't actually call these, since we are going to invoke distributedProcess directly
-    rb.resultIds = new HashMap<Object, ShardDoc>();
-
-    rb.outgoing = new ArrayList<ShardRequest>();
-    //one doc per shard, but make sure there are enough docs to go around
-    for (int i = 0; i < rb.shards.length; i++){
-      ShardDoc doc = new ShardDoc();
-      doc.id = i; //must be a valid doc that was indexed.
-      doc.score = 1 - (i / (float)rb.shards.length);
-      doc.positionInResponse = i;
-      doc.shard = rb.shards[i];
-      doc.orderInShard = 0;
-      rb.resultIds.put(doc.id, doc);
-    }
-
-    int result = tvComp.distributedProcess(rb);
-    assertTrue(result + " does not equal: " + ResponseBuilder.STAGE_DONE, result == ResponseBuilder.STAGE_DONE);
-    //one outgoing per shard
-    assertTrue("rb.outgoing Size: " + rb.outgoing.size() + " is not: " + rb.shards.length, rb.outgoing.size() == rb.shards.length);
-    for (ShardRequest request : rb.outgoing) {
-      ModifiableSolrParams solrParams = request.params;
-      log.info("Shard: " + Arrays.asList(request.shards) + " Params: " + solrParams);
-    }
-
-    rb.req.close();
-  }
-
 }
 
 

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java Mon Aug 13 11:16:57 2012
@@ -648,6 +648,11 @@ public class QueryEqualityTest extends S
                      "foo_i");
   }
 
+  public void testTestFuncs() throws Exception {
+    assertFuncEquals("sleep(1,5)", "sleep(1,5)");
+    assertFuncEquals("threadid()", "threadid()");
+  }
+
   /**
    * this test does not assert anything itself, it simply toggles a static 
    * boolean informing an @AfterClass method to assert that every default 

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestRecovery.java Mon Aug 13 11:16:57 2012
@@ -559,7 +559,22 @@ public class TestRecovery extends SolrTe
 
       assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) == 0);
 
+      ulog.bufferUpdates();
+      // simulate receiving no updates
+      ulog.applyBufferedUpdates();
+      updateJ(jsonAdd(sdoc("id","Q7", "_version_","117")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // do another add to make sure flags are back to normal
 
+      req.close();
+      h.close();
+      createCore();
+
+      req = req();
+      uhandler = req.getCore().getUpdateHandler();
+      ulog = uhandler.getUpdateLog();
+
+      assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) == 0); // check flags on Q7
+
+      logReplayFinish.acquire();
       assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state
     } finally {
       DirectUpdateHandler2.commitOnClose = true;

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSolrJ.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSolrJ.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSolrJ.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/search/TestSolrJ.java Mon Aug 13 11:16:57 2012
@@ -18,24 +18,149 @@
 package org.apache.solr.search;
 
 
-import org.apache.lucene.util.OpenBitSet;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.SolrServer;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
-import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.request.SolrQueryRequest;
-import org.junit.BeforeClass;
-import org.junit.Test;
 
+import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.List;
+import java.util.Random;
+
 
 public class TestSolrJ extends SolrTestCaseJ4 {
 
-  public void testSolrJ() {
+  public void testSolrJ() throws Exception {
+                          // docs, producers, connections, sleep_time
+    //  main(new String[] {"1000000","4", "1", "0"});
+
     // doCommitPerf();
   }
 
+  public static SolrServer server;
+  public static String idField = "id";
+  public static Exception ex;
+
+  public static void main(String[] args) throws Exception {
+    // String addr = "http://odin.local:80/solr";
+    // String addr = "http://odin.local:8983/solr";
+    String addr = "http://localhost:8983/solr";
+
+    int i = 0;
+    final int nDocs = Integer.parseInt(args[i++]);
+    final int nProducers = Integer.parseInt(args[i++]);
+    final int nConnections = Integer.parseInt(args[i++]);
+    final int maxSleep = Integer.parseInt(args[i++]);
+
+    ConcurrentUpdateSolrServer sserver = null;
+
+    // server = sserver = new ConcurrentUpdateSolrServer(addr,32,8);
+    server = sserver = new ConcurrentUpdateSolrServer(addr,64,nConnections);
+
+    server.deleteByQuery("*:*");
+    server.commit();
+
+    long start = System.currentTimeMillis();
+
+    final int docsPerThread = nDocs / nProducers;
+
+    Thread[] threads = new Thread[nProducers];
+
+    for (int threadNum = 0; threadNum<nProducers; threadNum++) {
+      final int base = threadNum * docsPerThread;
+
+      threads[threadNum] = new Thread("add-thread"+threadNum) {
+        public void run(){
+          try {
+            indexDocs(base, docsPerThread, maxSleep);
+          } catch (Exception e) {
+            System.out.println("###############################CAUGHT EXCEPTION");
+            e.printStackTrace();
+            ex = e;
+          }
+        }
+      };
+      threads[threadNum].start();
+    }
+
+    // optional: wait for commit?
+
+    for (int threadNum = 0; threadNum<nProducers; threadNum++) {
+      threads[threadNum].join();
+    }
+
+    if (sserver != null) {
+      sserver.blockUntilFinished();
+    }
+
+    long end = System.currentTimeMillis();
+    System.out.println("time="+(end-start) + " throughput="+(nDocs*1000L/(end-start)) + " Exception="+ex);
+
+    // should server threads be marked as daemon?
+    // need a server.close()!!!
+  }
+
+  public static SolrInputDocument getDocument(int docnum) {
+    SolrInputDocument doc = new SolrInputDocument();
+    doc.setField(idField, docnum );
+    doc.setField("cat", Integer.toString(docnum&0x0f) );
+    doc.setField("name", "my name is " + Integer.toString(docnum&0xff) );
+    doc.setField("foo_t", "now is the time for all good men to come to the aid of their country" );
+    doc.setField("foo_i", Integer.toString(docnum&0x0f) );
+    doc.setField("foo_s", Integer.toString(docnum&0xff) );
+    doc.setField("foo_b", Boolean.toString( (docnum&0x01) == 1) );
+    doc.setField("parent_s", Integer.toString(docnum-1) );
+    doc.setField("price", Integer.toString(docnum >> 4));
+
+    int golden = (int)2654435761L;
+    int h = docnum * golden;
+    int n = (h & 0xff) + 1;
+    List<Integer> lst = new ArrayList<Integer>(n);
+    for (int i=0; i<n; i++) {
+      h = (h+i) * golden;
+      lst.add(h & 0xfff);
+    }
+
+    doc.setField("num_is", lst);
+    return doc;
+  }
+
+  public static void indexDocs(int base, int count, int maxSleep) throws IOException, SolrServerException {
+    Random r = new Random(base);
+
+    for (int i=base; i<count+base; i++) {
+      if ((i & 0xfffff) == 0) {
+        System.out.print("\n% " + new Date()+ "\t" + i + "\t");
+        System.out.flush();
+      }
+
+      if ((i & 0xffff) == 0) {
+        System.out.print(".");
+        System.out.flush();
+      }
+
+      SolrInputDocument doc = getDocument(i);
+      server.add(doc);
+
+      if (maxSleep > 0) {
+        int sleep = r.nextInt(maxSleep);
+        try {
+          Thread.sleep(sleep);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          e.printStackTrace();
+          throw new RuntimeException(e);
+        }
+      }
+
+    }
+  }
+
+
   public void doCommitPerf() throws Exception {
     HttpSolrServer client = new HttpSolrServer("http://localhost:8983/solr");
 
@@ -55,4 +180,7 @@ public class TestSolrJ extends SolrTestC
     System.out.println("TIME: " + (end-start));
   }
 
+
+
+
 }

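The commented-out call in testSolrJ() documents the intended entry point: main() takes four positional arguments (docs, producers, connections, sleep_time) and drives a ConcurrentUpdateSolrServer against a running Solr instance. A sketch of invoking it programmatically, reusing exactly the argument values from that comment (a Solr server on http://localhost:8983/solr is assumed):

    // 1,000,000 docs, 4 producer threads, 1 connection, no sleep between adds
    TestSolrJ.main(new String[] {"1000000", "4", "1", "0"});
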
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java Mon Aug 13 11:16:57 2012
@@ -21,6 +21,8 @@ import static org.easymock.EasyMock.crea
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 
+import java.net.HttpURLConnection;
+import java.net.SocketTimeoutException;
 import java.net.URL;
 import java.net.URLConnection;
 import java.util.ArrayList;
@@ -114,10 +116,13 @@ public class SolrRequestParserTest exten
     String url = "http://www.apache.org/dist/lucene/solr/";
     byte[] bytes = null;
     try {
-      URLConnection connection = new URL(url).openConnection();
+      URL u = new URL(url);
+      HttpURLConnection connection = (HttpURLConnection)u.openConnection();
       connection.setConnectTimeout(5000);
       connection.setReadTimeout(5000);
       connection.connect();
+      int code = connection.getResponseCode();
+      assumeTrue("wrong response code from server: " + code, 200 == code);
       bytes = IOUtils.toByteArray( connection.getInputStream());
     }
     catch( Exception ex ) {
@@ -134,8 +139,13 @@ public class SolrRequestParserTest exten
     List<ContentStream> streams = new ArrayList<ContentStream>();
     SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
     assertEquals( 1, streams.size() );
-    assertArrayEquals( bytes, IOUtils.toByteArray( streams.get(0).getStream() ) );
-    req.close();
+    try {
+      assertArrayEquals( bytes, IOUtils.toByteArray( streams.get(0).getStream() ) );
+    } catch (SocketTimeoutException ex) {
+      assumeNoException("Problems retrieving from " + url + " to run the test.", ex);
+    } finally {
+      req.close();
+    }
   }
   
   @Test

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/ScriptEngineTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/ScriptEngineTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/ScriptEngineTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/ScriptEngineTest.java Mon Aug 13 11:16:57 2012
@@ -25,6 +25,9 @@ import javax.script.ScriptEngineManager;
 import javax.script.ScriptException;
 import java.io.StringReader;
 
+import org.junit.Assume;
+import org.junit.BeforeClass;
+
 /**
  * Sanity tests basic functionality of {@link ScriptEngineManager} and 
 * {@link ScriptEngine} w/o exercising any Lucene-specific code.
@@ -33,6 +36,12 @@ public class ScriptEngineTest extends Lu
 
   private ScriptEngineManager manager;
 
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    Assume.assumeNotNull((new ScriptEngineManager()).getEngineByExtension("js"));
+    Assume.assumeNotNull((new ScriptEngineManager()).getEngineByName("JavaScript"));
+  }
+
   @Override
   public void setUp() throws Exception {
     super.setUp();
@@ -83,13 +92,17 @@ public class ScriptEngineTest extends Lu
     assertEquals(3, result.intValue());
   }
 
-//  public void testJRuby() throws ScriptException, NoSuchMethodException {  // Simply adding jruby.jar to Solr's lib/ directory gets this test passing
-//    ScriptEngine engine = manager.getEngineByName("jruby");
-//    assertNotNull(engine);
-//    engine.eval("def add(a,b); a + b; end");
-//    Long result = (Long) ((Invocable)engine).invokeFunction("add", 1, 2);
-//    assertNotNull(result);
-//    assertEquals(3, result.intValue());
-//  }
+  public void testJRuby() throws ScriptException, NoSuchMethodException {
+    // Simply adding jruby.jar to Solr's lib/ directory gets this test passing
+    ScriptEngine engine = manager.getEngineByName("jruby");
+
+    Assume.assumeNotNull(engine);
+
+    assertNotNull(engine);
+    engine.eval("def add(a,b); a + b; end");
+    Long result = (Long) ((Invocable)engine).invokeFunction("add", 1, 2);
+    assertNotNull(result);
+    assertEquals(3, result.intValue());
+  }
 
 }

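The @BeforeClass guard above skips the whole class when no JavaScript engine is available, and testJRuby() now applies the same Assume pattern per engine instead of staying commented out. A small standalone sketch of that guard style using only the javax.script API (checked exceptions omitted; the script body is illustrative):

    import javax.script.Invocable;
    import javax.script.ScriptEngine;
    import javax.script.ScriptEngineManager;

    ScriptEngine js = new ScriptEngineManager().getEngineByName("JavaScript");
    if (js != null) {   // in a test this would be Assume.assumeNotNull(js)
      js.eval("function add(a, b) { return a + b; }");
      Number sum = (Number) ((Invocable) js).invokeFunction("add", 1, 2);
      System.out.println(sum.intValue());   // prints 3
    }
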
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactoryTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactoryTest.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactoryTest.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/test/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactoryTest.java Mon Aug 13 11:16:57 2012
@@ -45,9 +45,8 @@ public class StatelessScriptUpdateProces
 
   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig-script-updateprocessor.xml", "schema12.xml");
-
     Assume.assumeNotNull((new ScriptEngineManager()).getEngineByExtension("js"));
+    initCore("solrconfig-script-updateprocessor.xml", "schema12.xml");
   }
 
   /**

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/schema.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/schema.xml?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/schema.xml (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/schema.xml Mon Aug 13 11:16:57 2012
@@ -109,7 +109,10 @@
    <!-- Common metadata fields, named specifically to match up with
      SolrCell metadata when parsing rich documents such as Word, PDF.
      Some fields are multiValued only because Tika currently may return
-     multiple values for them.
+     multiple values for them. Some metadata is parsed from the documents,
+     but some comes from the client context:
+       "content_type": from the HTTP headers of the incoming stream
+       "resourcename": from the SolrCell request parameter resource.name
    -->
    <field name="title" type="text_general" indexed="true" stored="true" multiValued="true"/>
    <field name="subject" type="text_general" indexed="true" stored="true"/>
@@ -118,10 +121,18 @@
    <field name="author" type="text_general" indexed="true" stored="true"/>
    <field name="keywords" type="text_general" indexed="true" stored="true"/>
    <field name="category" type="text_general" indexed="true" stored="true"/>
+   <field name="resourcename" type="text_general" indexed="true" stored="true"/>
+   <field name="url" type="text_general" indexed="true" stored="true"/>
    <field name="content_type" type="string" indexed="true" stored="true" multiValued="true"/>
    <field name="last_modified" type="date" indexed="true" stored="true"/>
    <field name="links" type="string" indexed="true" stored="true" multiValued="true"/>
 
+   <!-- Main body of document extracted by SolrCell.
+        NOTE: This field is not indexed by default, since it is also copied to "text"
+        using copyField below. This is to save space. Use this field for returning and
+        highlighting document content. Use the "text" field to search the content. -->
+   <field name="content" type="text_general" indexed="false" stored="true" multiValued="true"/>
+   
 
    <!-- catchall field, containing all other searchable text fields (implemented
         via copyField further on in this schema  -->
@@ -232,6 +243,19 @@
 
    <!-- Copy the price into a currency enabled field (default USD) -->
    <copyField source="price" dest="price_c"/>
+
+   <!-- Text fields from SolrCell to search by default in our catch-all field -->
+   <copyField source="title" dest="text"/>
+   <copyField source="author" dest="text"/>
+   <copyField source="description" dest="text"/>
+   <copyField source="keywords" dest="text"/>
+   <copyField source="content" dest="text"/>
+   <copyField source="content_type" dest="text"/>
+   <copyField source="resourcename" dest="text"/>
+   <copyField source="url" dest="text"/>
+
+   <!-- Create a string version of author for faceting -->
+   <copyField source="author" dest="author_s"/>
 	
    <!-- Above, multiple source fields are copied to the [text] field. 
 	  Another way to map multiple source fields to the same 

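With the fields and copyField rules added above, SolrCell output such as "content" and "author" becomes searchable through the catch-all "text" field, "author_s" gets a string copy for faceting, and "content" itself is stored but not indexed so it can be returned and highlighted. A minimal SolrJ sketch against the example core (URL and field values are placeholders, checked exceptions omitted):

    import org.apache.solr.client.solrj.impl.HttpSolrServer;
    import org.apache.solr.common.SolrInputDocument;

    HttpSolrServer solr = new HttpSolrServer("http://localhost:8983/solr");
    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", "doc-1");
    doc.setField("author", "Jane Smith");   // copied to "text" and "author_s"
    doc.setField("content", "body text extracted from a rich document");   // stored only, copied to "text"
    solr.add(doc);
    solr.commit();
    // q=text:extracted now matches doc-1; facet.field=author_s buckets on "Jane Smith"
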
Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/solrconfig.xml?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/solrconfig.xml (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/solrconfig.xml Mon Aug 13 11:16:57 2012
@@ -850,6 +850,7 @@
        <str name="defType">edismax</str>
        <str name="qf">
           text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
+          title^10.0 description^5.0 keywords^5.0 author^2.0 resourcename^1.0
        </str>
        <str name="df">text</str>
        <str name="mm">100%</str>
@@ -859,14 +860,17 @@
 
        <str name="mlt.qf">
          text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
+         title^10.0 description^5.0 keywords^5.0 author^2.0 resourcename^1.0
        </str>
-       <str name="mlt.fl">text,features,name,sku,id,manu,cat</str>
+       <str name="mlt.fl">text,features,name,sku,id,manu,cat,title,description,keywords,author,resourcename</str>
        <int name="mlt.count">3</int>
 
        <!-- Faceting defaults -->
        <str name="facet">on</str>
        <str name="facet.field">cat</str>
        <str name="facet.field">manu_exact</str>
+       <str name="facet.field">content_type</str>
+       <str name="facet.field">author_s</str>
        <str name="facet.query">ipod</str>
        <str name="facet.query">GB</str>
        <str name="facet.mincount">1</str>
@@ -889,9 +893,18 @@
 
        <!-- Highlighting defaults -->
        <str name="hl">on</str>
-       <str name="hl.fl">text features name</str>
+       <str name="hl.fl">content features title name</str>
+       <str name="hl.encoder">html</str>
+       <str name="hl.simple.pre">&lt;b&gt;</str>
+       <str name="hl.simple.post">&lt;/b&gt;</str>
+       <str name="f.title.hl.fragsize">0</str>
+       <str name="f.title.hl.alternateField">title</str>
        <str name="f.name.hl.fragsize">0</str>
        <str name="f.name.hl.alternateField">name</str>
+       <str name="f.content.hl.snippets">3</str>
+       <str name="f.content.hl.fragsize">200</str>
+       <str name="f.content.hl.alternateField">content</str>
+       <str name="f.content.hl.maxAlternateFieldLength">750</str>
 
        <!-- Spell checking defaults -->
        <str name="spellcheck">on</str>
@@ -952,9 +965,6 @@
                   startup="lazy"
                   class="solr.extraction.ExtractingRequestHandler" >
     <lst name="defaults">
-      <!-- All the main content goes into "text"... if you need to return
-           the extracted text or do highlighting, use a stored field. -->
-      <str name="fmap.content">text</str>
       <str name="lowernames">true</str>
       <str name="uprefix">ignored_</str>
 

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/VM_global_library.vm
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/VM_global_library.vm?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/VM_global_library.vm (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/VM_global_library.vm Mon Aug 13 11:16:57 2012
@@ -133,10 +133,14 @@
 
 #macro(field $f)
   #if($response.response.highlighting.get($docId).get($f).get(0))
-    $!response.response.highlighting.get($docId).get($f).get(0)
+    #set($pad = "")
+    #foreach($v in $response.response.highlighting.get($docId).get($f))
+$pad$v##
+      #set($pad = " ... ")
+    #end
   #else
     #foreach($v in $doc.getFieldValues($f))
-      $v
+$v##
     #end
   #end
 #end  

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_fields.vm
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_fields.vm?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_fields.vm (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_fields.vm Mon Aug 13 11:16:57 2012
@@ -1,6 +1,8 @@
 #if($response.facetFields)
     <h2 #annTitle("Facets generated by adding &facet.field= to the request")>Field Facets</h2>
     #foreach($field in $response.facetFields)
+      ## Hide facets that have no values
+      #if($field.values.size() > 0)
       <span class="facet-field">$field.name</span>
 
       <ul>
@@ -8,5 +10,6 @@
             <li><a href="#url_for_facet_filter($field.name, $facet.name)">$facet.name</a> ($facet.count)</li>
         #end
       </ul>
+      #end
     #end
   #end
\ No newline at end of file

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_ranges.vm
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_ranges.vm?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_ranges.vm (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/facet_ranges.vm Mon Aug 13 11:16:57 2012
@@ -1,5 +1,7 @@
 <h2 #annTitle("Facets generated by adding &facet.range= to the request")>Range Facets</h2>
 #foreach ($field in $response.response.facet_counts.facet_ranges)
+  ## Hide facets that have no values
+  #if($field.value.counts.size() > 0)
 	#set($name = $field.key)
 	#set($display = $name)
 	#set($f = $field.value.counts)
@@ -9,4 +11,5 @@
 	#set($before = $field.value.before)
 	#set($after = $field.value.after)
 	#display_facet_range($f, $display, $name, $start, $end, $gap, $before, $after)
+  #end
 #end
\ No newline at end of file

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hit.vm
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hit.vm?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hit.vm (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hit.vm Mon Aug 13 11:16:57 2012
@@ -1,5 +1,11 @@
 #set($docId = $doc.getFieldValue('id'))
 
 <div class="result-document">
-  #parse("doc.vm")
+#if($doc.getFieldValue('name'))
+  #parse("product-doc.vm")
+#elseif($doc.getFieldValue('compName_s'))
+  #parse("join-doc.vm")
+#else
+  #parse("richtext-doc.vm")
+#end
 </div>

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hitGrouped.vm
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hitGrouped.vm?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hitGrouped.vm (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/hitGrouped.vm Mon Aug 13 11:16:57 2012
@@ -6,7 +6,13 @@
     <div class="group-doclist" #annTitle("Contains the top scoring documents in the group")>
       #foreach ($doc in $group.doclist)
         #set($docId = $doc.getFieldValue('id'))
-        #parse("doc.vm")
+        #if($doc.getFieldValue('name'))
+          #parse("product-doc.vm")
+        #elseif($doc.getFieldValue('compName_s'))
+          #parse("join-doc.vm")
+        #else
+          #parse("richtext-doc.vm")
+        #end
       #end
     </div>
     #end</div>

Modified: lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/main.css
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/main.css?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/main.css (original)
+++ lucene/dev/branches/pforcodec_3892/solr/example/solr/collection1/conf/velocity/main.css Mon Aug 13 11:16:57 2012
@@ -167,6 +167,10 @@ a {
   width:60%;
 }
 
+.result-body{
+  background: #ddd;
+}
+
 .mlt{
   
 }

Modified: lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java Mon Aug 13 11:16:57 2012
@@ -32,6 +32,7 @@ import org.apache.http.client.HttpClient
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.request.IsUpdateRequest;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
@@ -61,6 +62,18 @@ public class CloudSolrServer extends Sol
   private LBHttpSolrServer lbServer;
   private HttpClient myClient;
   Random rand = new Random();
+  
+  // the cluster state shouldn't change often, so these volatile reads should be very cheap
+  private volatile List<String> urlList;
+  
+  private volatile List<String> leaderUrlList;
+  private volatile List<String> replicasList;
+  
+  private volatile int lastClusterStateHashCode;
+  
+  private final boolean updatesToLeaders;
+
+  
   /**
    * @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
    * in the form HOST:PORT.
@@ -69,6 +82,7 @@ public class CloudSolrServer extends Sol
       this.zkHost = zkHost;
       this.myClient = HttpClientUtil.createClient(null);
       this.lbServer = new LBHttpSolrServer(myClient);
+      this.updatesToLeaders = true;
   }
 
   /**
@@ -79,6 +93,19 @@ public class CloudSolrServer extends Sol
   public CloudSolrServer(String zkHost, LBHttpSolrServer lbServer) {
     this.zkHost = zkHost;
     this.lbServer = lbServer;
+    this.updatesToLeaders = true;
+  }
+  
+  /**
+   * @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
+   * in the form HOST:PORT.
+   * @param lbServer LBHttpSolrServer instance for requests. 
+   * @param updatesToLeaders if true, send updates only to shard leaders; defaults to true
+   */
+  public CloudSolrServer(String zkHost, LBHttpSolrServer lbServer, boolean updatesToLeaders) {
+    this.zkHost = zkHost;
+    this.lbServer = lbServer;
+    this.updatesToLeaders = updatesToLeaders;
   }
 
   public ZkStateReader getZkStateReader() {
@@ -140,6 +167,13 @@ public class CloudSolrServer extends Sol
     // TODO: if you can hash here, you could favor the shard leader
     
     ClusterState clusterState = zkStateReader.getClusterState();
+    boolean sendToLeaders = false;
+    List<String> replicas = null;
+    
+    if (request instanceof IsUpdateRequest && updatesToLeaders) {
+      sendToLeaders = true;
+      replicas = new ArrayList<String>();
+    }
 
     SolrParams reqParams = request.getParams();
     if (reqParams == null) {
@@ -154,6 +188,9 @@ public class CloudSolrServer extends Sol
     // Extract each comma separated collection name and store in a List.
     List<String> collectionList = StrUtils.splitSmart(collection, ",", true);
     
+    // TODO: not a big deal because of the caching, but we could avoid looking at every shard
+    // when getting leaders if we tweaked some things
+    
     // Retrieve slices from the cloud state and, for each collection specified,
     // add it to the Map of slices.
     Map<String,Slice> slices = new HashMap<String,Slice>();
@@ -164,32 +201,57 @@ public class CloudSolrServer extends Sol
 
     Set<String> liveNodes = clusterState.getLiveNodes();
 
-    // IDEA: have versions on various things... like a global clusterState version
-    // or shardAddressVersion (which only changes when the shards change)
-    // to allow caching.
-
-    // build a map of unique nodes
-    // TODO: allow filtering by group, role, etc
-    Map<String,ZkNodeProps> nodes = new HashMap<String,ZkNodeProps>();
-    List<String> urlList = new ArrayList<String>();
-    for (Slice slice : slices.values()) {
-      for (ZkNodeProps nodeProps : slice.getShards().values()) {
-        ZkCoreNodeProps coreNodeProps = new ZkCoreNodeProps(nodeProps);
-        String node = coreNodeProps.getNodeName();
-        if (!liveNodes.contains(coreNodeProps.getNodeName())
-            || !coreNodeProps.getState().equals(
-                ZkStateReader.ACTIVE)) continue;
-        if (nodes.put(node, nodeProps) == null) {
-          String url = coreNodeProps.getCoreUrl();
-          urlList.add(url);
+    if ((sendToLeaders && leaderUrlList == null) || (!sendToLeaders && urlList == null) || clusterState.hashCode() != this.lastClusterStateHashCode) {
+    
+      // build a map of unique nodes
+      // TODO: allow filtering by group, role, etc
+      Map<String,ZkNodeProps> nodes = new HashMap<String,ZkNodeProps>();
+      List<String> urlList = new ArrayList<String>();
+      for (Slice slice : slices.values()) {
+        for (ZkNodeProps nodeProps : slice.getShards().values()) {
+          ZkCoreNodeProps coreNodeProps = new ZkCoreNodeProps(nodeProps);
+          String node = coreNodeProps.getNodeName();
+          if (!liveNodes.contains(coreNodeProps.getNodeName())
+              || !coreNodeProps.getState().equals(ZkStateReader.ACTIVE)) continue;
+          if (nodes.put(node, nodeProps) == null) {
+            if (!sendToLeaders || coreNodeProps.isLeader()) {
+              String url = coreNodeProps.getCoreUrl();
+              urlList.add(url);
+            } else if (sendToLeaders) {
+              String url = coreNodeProps.getCoreUrl();
+              replicas.add(url);
+            }
+          }
         }
       }
+      if (sendToLeaders) {
+        this.leaderUrlList = urlList; 
+        this.replicasList = replicas;
+      } else {
+        this.urlList = urlList;
+      }
+      this.lastClusterStateHashCode = clusterState.hashCode();
+    }
+    
+    List<String> theUrlList;
+    if (sendToLeaders) {
+      theUrlList = new ArrayList<String>(leaderUrlList.size());
+      theUrlList.addAll(leaderUrlList);
+    } else {
+      theUrlList = new ArrayList<String>(urlList.size());
+      theUrlList.addAll(urlList);
     }
+    Collections.shuffle(theUrlList, rand);
+    if (replicas != null) {
+      ArrayList<String> theReplicas = new ArrayList<String>(replicasList.size());
+      theReplicas.addAll(replicasList);
+      Collections.shuffle(theReplicas, rand);
 
-    Collections.shuffle(urlList, rand);
-    //System.out.println("########################## MAKING REQUEST TO " + urlList);
+      theUrlList.addAll(theReplicas);
+    }
+    //System.out.println("########################## MAKING REQUEST TO " + theUrlList);
  
-    LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(request, urlList);
+    LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(request, theUrlList);
     LBHttpSolrServer.Rsp rsp = lbServer.request(req);
     return rsp.getResponse();
   }
@@ -211,4 +273,16 @@ public class CloudSolrServer extends Sol
   public LBHttpSolrServer getLbServer() {
     return lbServer;
   }
+
+  List<String> getUrlList() {
+    return urlList;
+  }
+
+  List<String> getLeaderUrlList() {
+    return leaderUrlList;
+  }
+
+  List<String> getReplicasList() {
+    return replicasList;
+  }
 }

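The change above caches the cluster's URL lists (invalidated when the ClusterState hash code changes) and, for update requests, sends to shuffled shard leaders first with the remaining replicas appended as fallbacks. A hedged client-side sketch; the ZooKeeper address and collection name are placeholders and checked exceptions are omitted:

    import org.apache.solr.client.solrj.impl.CloudSolrServer;
    import org.apache.solr.common.SolrInputDocument;

    CloudSolrServer cloud = new CloudSolrServer("zkhost:2181");   // updatesToLeaders defaults to true
    cloud.setDefaultCollection("collection1");

    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", "1");
    cloud.add(doc);      // tried against shard leaders first, replicas only as fallback
    cloud.commit();

The new three-argument constructor makes the behaviour explicit: new CloudSolrServer(zkHost, lbServer, false) spreads updates over all active replicas instead.
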
Modified: lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java Mon Aug 13 11:16:57 2012
@@ -49,6 +49,7 @@ import org.apache.solr.common.params.Mod
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -71,7 +72,8 @@ public class ConcurrentUpdateSolrServer 
       .getLogger(ConcurrentUpdateSolrServer.class);
   private HttpSolrServer server;
   final BlockingQueue<UpdateRequest> queue;
-  final ExecutorService scheduler = Executors.newCachedThreadPool();
+  final ExecutorService scheduler = Executors.newCachedThreadPool(
+      new SolrjNamedThreadFactory("concurrentUpdateScheduler"));
   final Queue<Runner> runners;
   volatile CountDownLatch lock = null; // used to block everything
   final int threadCount;

Modified: lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrServer.java?rev=1372366&r1=1372365&r2=1372366&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrServer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrServer.java Mon Aug 13 11:16:57 2012
@@ -21,6 +21,7 @@ import org.apache.solr.client.solrj.*;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.common.SolrException;
 
 import java.io.IOException;
@@ -397,7 +398,7 @@ public class LBHttpSolrServer extends So
   public void setSoTimeout(int timeout) {
     HttpClientUtil.setSoTimeout(httpClient, timeout);
   }
-  
+
   @Override
   public void shutdown() {
     if (aliveCheckExecutor != null) {
@@ -555,7 +556,8 @@ public class LBHttpSolrServer extends So
     if (aliveCheckExecutor == null) {
       synchronized (this) {
         if (aliveCheckExecutor == null) {
-          aliveCheckExecutor = Executors.newSingleThreadScheduledExecutor();
+          aliveCheckExecutor = Executors.newSingleThreadScheduledExecutor(
+              new SolrjNamedThreadFactory("aliveCheckExecutor"));
           aliveCheckExecutor.scheduleAtFixedRate(
                   getAliveCheckRunner(new WeakReference<LBHttpSolrServer>(this)),
                   this.interval, this.interval, TimeUnit.MILLISECONDS);