Posted to commits@lucene.apache.org by ry...@apache.org on 2012/03/07 00:17:23 UTC

svn commit: r1297785 [11/16] - in /lucene/dev/branches/lucene3795_lsp_spatial_module: ./ dev-tools/eclipse/ dev-tools/maven/ dev-tools/maven/lucene/ dev-tools/maven/lucene/contrib/demo/ dev-tools/maven/lucene/contrib/highlighter/ dev-tools/maven/lucene...

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java Tue Mar  6 23:17:08 2012
@@ -17,16 +17,6 @@ package org.apache.solr.handler.dataimpo
  * limitations under the License.
  */
 
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -37,9 +27,20 @@ import org.apache.solr.client.solrj.impl
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.params.CommonParams;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
+
 /**
  * <p>
  * An implementation of {@link EntityProcessor} which fetches values from a
@@ -58,15 +59,8 @@ public class SolrEntityProcessor extends
   
   public static final String SOLR_SERVER = "url";
   public static final String QUERY = "query";
-  /**
-   * (format="javabin|xml") default is javabin
-   */
-  public static final String FORMAT = "format";
-  public static final String ROWS = "rows";
-  public static final String FIELDS = "fields";
-  public static final String FQ = "fq";
   public static final String TIMEOUT = "timeout";
-  
+
   public static final int TIMEOUT_SECS = 5 * 60; // 5 minutes
   public static final int ROWS_DEFAULT = 50;
   
@@ -75,10 +69,22 @@ public class SolrEntityProcessor extends
   private int rows = ROWS_DEFAULT;
   private String[] filterQueries;
   private String[] fields;
+  private String queryType;
   private int timeout = TIMEOUT_SECS;
   
   private boolean initDone = false;
-  
+
+  /**
+   * Factory method that returns a {@link HttpClient} instance used for interfacing with a source Solr service.
+   * One can override this method to return a differently configured {@link HttpClient} instance.
+   * For example, one can configure HTTPS or HTTP authentication.
+   *
+   * @return a {@link HttpClient} instance used for interfacing with a source Solr service
+   */
+  protected HttpClient getHttpClient() {
+    return new HttpClient(new MultiThreadedHttpConnectionManager());
+  }
+
   @Override
   protected void firstInit(Context context) {
     super.firstInit(context);
@@ -89,23 +95,21 @@ public class SolrEntityProcessor extends
         throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
             "SolrEntityProcessor: parameter 'url' is required");
       }
-      HttpClient client = new HttpClient(
-          new MultiThreadedHttpConnectionManager());
+
+      HttpClient client = getHttpClient();
       URL url = new URL(serverPath);
-      
-      if ("xml".equals(context.getResolvedEntityAttribute(FORMAT))) {
-        solrServer = new CommonsHttpSolrServer(url, client,
-            new XMLResponseParser(), false);
+      // (wt="javabin|xml") default is javabin
+      if ("xml".equals(context.getResolvedEntityAttribute(CommonParams.WT))) {
+        solrServer = new CommonsHttpSolrServer(url, client, new XMLResponseParser(), false);
         LOG.info("using XMLResponseParser");
       } else {
         solrServer = new CommonsHttpSolrServer(url, client);
         LOG.info("using BinaryResponseParser");
       }
-      
     } catch (MalformedURLException e) {
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e);
     }
-    
+
     this.queryString = context.getResolvedEntityAttribute(QUERY);
     if (this.queryString == null) {
       throw new DataImportHandlerException(
@@ -114,21 +118,21 @@ public class SolrEntityProcessor extends
       );
     }
     
-    String rowsP = context.getResolvedEntityAttribute(ROWS);
+    String rowsP = context.getResolvedEntityAttribute(CommonParams.ROWS);
     if (rowsP != null) {
       rows = Integer.parseInt(rowsP);
     }
     
-    String fqAsString = context.getResolvedEntityAttribute(FQ);
+    String fqAsString = context.getResolvedEntityAttribute(CommonParams.FQ);
     if (fqAsString != null) {
       this.filterQueries = fqAsString.split(",");
     }
     
-    String fieldsAsString = context.getResolvedEntityAttribute(FIELDS);
+    String fieldsAsString = context.getResolvedEntityAttribute(CommonParams.FL);
     if (fieldsAsString != null) {
       this.fields = fieldsAsString.split(",");
     }
-    
+    this.queryType = context.getResolvedEntityAttribute(CommonParams.QT);
     String timeoutAsString = context.getResolvedEntityAttribute(TIMEOUT);
     if (timeoutAsString != null) {
       this.timeout = Integer.parseInt(timeoutAsString);
@@ -181,6 +185,7 @@ public class SolrEntityProcessor extends
         solrQuery.addField(field);
       }
     }
+    solrQuery.setQueryType(queryType);
     solrQuery.setFilterQueries(filterQueries);
     solrQuery.setTimeAllowed(timeout * 1000);
     

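The getHttpClient() factory method added above is the extension point for pointing
SolrEntityProcessor at a secured source Solr instance. A minimal sketch of such an
override, assuming commons-httpclient 3.x on the classpath; the subclass name and
credentials are hypothetical:

    import org.apache.commons.httpclient.HttpClient;
    import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
    import org.apache.commons.httpclient.UsernamePasswordCredentials;
    import org.apache.commons.httpclient.auth.AuthScope;

    public class AuthenticatingSolrEntityProcessor extends SolrEntityProcessor {
      @Override
      protected HttpClient getHttpClient() {
        // Same connection manager as the default implementation...
        HttpClient client = new HttpClient(new MultiThreadedHttpConnectionManager());
        // ...plus basic-auth credentials for the source Solr service (illustrative values).
        client.getState().setCredentials(AuthScope.ANY,
            new UsernamePasswordCredentials("dih-user", "dih-password"));
        client.getParams().setAuthenticationPreemptive(true);
        return client;
      }
    }
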
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java Tue Mar  6 23:17:08 2012
@@ -53,7 +53,8 @@ public class TestScriptTransformer exten
       sep.applyTransformer(map);
       assertEquals(map.get("name"), "Hello Scott");
     } catch (DataImportHandlerException e) {
-      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", "<script> can be used only in java 6 or above".equals(e.getMessage()));
+      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", 
+                  e.getMessage().startsWith("<script> can be used only in java 6 or above"));      
       throw e;
     }
   }
@@ -85,7 +86,8 @@ public class TestScriptTransformer exten
       sep.applyTransformer(map);
       assertEquals(map.get("name"), "Hello Scott");
     } catch (DataImportHandlerException e) {
-      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", "<script> can be used only in java 6 or above".equals(e.getMessage()));
+      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", 
+                  e.getMessage().startsWith("<script> can be used only in java 6 or above"));      
       throw e;
     }
   }
@@ -101,7 +103,8 @@ public class TestScriptTransformer exten
               .item(0));
       assertTrue(config.script.text.indexOf("checkNextToken") > -1);
     } catch (DataImportHandlerException e) {
-      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", "<script> can be used only in java 6 or above".equals(e.getMessage()));
+      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", 
+                  e.getMessage().startsWith("<script> can be used only in java 6 or above"));      
       throw e;
     }
   }
@@ -129,7 +132,8 @@ public class TestScriptTransformer exten
       sep.applyTransformer(map);
       assertNull(map.get("$hasMore"));
     } catch (DataImportHandlerException e) {
-      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", "<script> can be used only in java 6 or above".equals(e.getMessage()));
+      assumeFalse("JRE does not contain a JavaScript engine (OpenJDK)", 
+                  e.getMessage().startsWith("<script> can be used only in java 6 or above"));      
       throw e;
     }
   }

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java Tue Mar  6 23:17:08 2012
@@ -145,7 +145,7 @@ public class TestSolrEntityProcessorEndT
     
     try {
       addDocumentsToSolr(SOLR_DOCS);
-      runFullImport(generateDIHConfig("query='*:*' rows='2' fields='id,desc' onError='skip'", jetty.getLocalPort()));
+      runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", jetty.getLocalPort()));
     } catch (Exception e) {
       LOG.error(e.getMessage(), e);
       fail(e.getMessage());
@@ -178,7 +178,7 @@ public class TestSolrEntityProcessorEndT
     
     try {
       addDocumentsToSolr(generateSolrDocuments(7));
-      runFullImport(generateDIHConfig("query='*:*' fields='id' rows='2'", jetty.getLocalPort()));
+      runFullImport(generateDIHConfig("query='*:*' fl='id' rows='2'", jetty.getLocalPort()));
     } catch (Exception e) {
       LOG.error(e.getMessage(), e);
       fail(e.getMessage());
@@ -231,7 +231,7 @@ public class TestSolrEntityProcessorEndT
     assertQ(req("*:*"), "//result[@numFound='0']");
     
     try {
-      runFullImport(generateDIHConfig("query='*:*' rows='2' fields='id,desc' onError='skip'", jetty.getLocalPort()));
+      runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", jetty.getLocalPort()));
     } catch (Exception e) {
       LOG.error(e.getMessage(), e);
       fail(e.getMessage());
@@ -244,7 +244,7 @@ public class TestSolrEntityProcessorEndT
     assertQ(req("*:*"), "//result[@numFound='0']");
     
     try {
-      runFullImport(generateDIHConfig("query='bogus:3' rows='2' fields='id,desc' onError='abort'", jetty.getLocalPort()));
+      runFullImport(generateDIHConfig("query='bogus:3' rows='2' fl='id,desc' onError='abort'", jetty.getLocalPort()));
     } catch (Exception e) {
       LOG.error(e.getMessage(), e);
       fail(e.getMessage());

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java Tue Mar  6 23:17:08 2012
@@ -211,7 +211,7 @@ public class TestXPathEntityProcessor ex
     tmpdir.delete();
     tmpdir.mkdir();
     tmpdir.deleteOnExit();
-    TestFileListEntityProcessor.createFile(tmpdir, "x.xsl", xsl.getBytes("UTF-8"),
+    AbstractDataImportHandlerTestCase.createFile(tmpdir, "x.xsl", xsl.getBytes("UTF-8"),
             false);
     Map entityAttrs = createMap("name", "e",
             XPathEntityProcessor.USE_SOLR_ADD_SCHEMA, "true", "xsl", ""

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/dataimport.jsp
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/dataimport.jsp?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/dataimport.jsp (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/dataimport.jsp Tue Mar  6 23:17:08 2012
@@ -47,7 +47,7 @@ Select handler:
 
 <frameset cols = "50%, 50%">
   <frame src ="debug.jsp?handler=<%=handler%>" />
-  <frame src ="../select?qt=<%=handler%>&command=status"  name="result"/>
+  <frame src ="..<%=handler%>?command=status"  name="result"/>
 </frameset>
 <% } %>
 </html>

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/debug.jsp
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/debug.jsp?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/debug.jsp (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/dataimporthandler/src/webapp/admin/debug.jsp Tue Mar  6 23:17:08 2012
@@ -30,9 +30,8 @@
 <%
   String handler = request.getParameter("handler");  // must be specified
 %>
-<form action="../select" target="result" method="get">
+<form action="..<%=handler%>" target="result" method="get">
 <input type="hidden" name="debug" value="on"/>
-<input type="hidden" name="qt" value="<%=handler%>"/>
 <table>
   <tr>
     <th>Handler: </th>
@@ -69,17 +68,16 @@
 	<tr>
 		<td colspan="2"><textarea id="txtDataConfig" rows="30" cols="80" name="dataConfig"></textarea></td>
     <script type="text/javascript" language="javascript">
-        $.get("../select?qt=<%=handler%>&command=show-config", function(data){
+        $.get("..<%=handler%>?command=show-config", function(data){
             $('#txtDataConfig').attr('value', data);
         });
     </script>
 	</tr>
 </table>
 </form>
-<form action="../select" target="result" method="get">
+<form action="..<%=handler%>" target="result" method="get">
 	<input type="hidden" name="clean" value="false">
 	<input type="hidden" name="commit" value="true">
-  <input type="hidden" name="qt" value="<%=handler%>"/>
 	<input class="stdbutton" type="submit" name="command" value="full-import">
 	<input class="stdbutton" type="submit" name="command" value="delta-import">
 	<input class="stdbutton" type="submit" name="command" value="status">
@@ -92,8 +90,7 @@
 	<input type="hidden" name="rows" value="0">
 	<input class="stdbutton" type="submit" value="Documents Count">
 </form>
-<form action="../select" target="result" method="get">
-  <input type="hidden" name="qt" value="<%=handler%>"/>
+<form action="..<%=handler%>" target="result" method="get">
 	<input type="hidden" name="verbose" value="true">
 	<input type="hidden" name="clean" value="true">
 	<input type="hidden" name="commit" value="true">

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java Tue Mar  6 23:17:08 2012
@@ -36,7 +36,9 @@ import org.apache.solr.update.AddUpdateC
 import org.apache.solr.update.processor.UpdateRequestProcessor;
 import org.apache.tika.config.TikaConfig;
 import org.apache.tika.exception.TikaException;
+import org.apache.tika.metadata.HttpHeaders;
 import org.apache.tika.metadata.Metadata;
+import org.apache.tika.metadata.TikaMetadataKeys;
 import org.apache.tika.mime.MediaType;
 import org.apache.tika.parser.AutoDetectParser;
 import org.apache.tika.parser.DefaultParser;
@@ -150,11 +152,11 @@ public class ExtractingDocumentLoader ex
       // then Tika can make use of it in guessing the appropriate MIME type:
       String resourceName = req.getParams().get(ExtractingParams.RESOURCE_NAME, null);
       if (resourceName != null) {
-        metadata.add(Metadata.RESOURCE_NAME_KEY, resourceName);
+        metadata.add(TikaMetadataKeys.RESOURCE_NAME_KEY, resourceName);
       }
       // Provide stream's content type as hint for auto detection
       if(stream.getContentType() != null) {
-        metadata.add(Metadata.CONTENT_TYPE, stream.getContentType());
+        metadata.add(HttpHeaders.CONTENT_TYPE, stream.getContentType());
       }
 
       InputStream inputStream = null;
@@ -167,7 +169,7 @@ public class ExtractingDocumentLoader ex
         // HtmlParser and TXTParser regard Metadata.CONTENT_ENCODING in metadata
         String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType());
         if(charset != null){
-          metadata.add(Metadata.CONTENT_ENCODING, charset);
+          metadata.add(HttpHeaders.CONTENT_ENCODING, charset);
         }
 
         String xpathExpr = params.get(ExtractingParams.XPATH_EXPRESSION);

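With the constants above, the resource-name and HTTP-style hints handed to Tika are
read from the TikaMetadataKeys and HttpHeaders interfaces rather than from Metadata
itself. A minimal sketch of populating the hints that way; the file name and types
are illustrative:

    Metadata metadata = new Metadata();
    metadata.add(TikaMetadataKeys.RESOURCE_NAME_KEY, "report.pdf"); // filename hint for type detection
    metadata.add(HttpHeaders.CONTENT_TYPE, "application/pdf");      // declared content type
    metadata.add(HttpHeaders.CONTENT_ENCODING, "UTF-8");            // declared charset
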
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java Tue Mar  6 23:17:08 2012
@@ -24,6 +24,7 @@ import org.apache.solr.schema.DateField;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.tika.metadata.Metadata;
+import org.apache.tika.metadata.TikaMetadataKeys;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
@@ -191,7 +192,7 @@ public class SolrContentHandler extends 
     if (sf==null && unknownFieldPrefix.length() > 0) {
       name = unknownFieldPrefix + name;
       sf = schema.getFieldOrNull(name);
-    } else if (sf == null && defaultField.length() > 0 && name.equals(Metadata.RESOURCE_NAME_KEY) == false /*let the fall through below handle this*/){
+    } else if (sf == null && defaultField.length() > 0 && name.equals(TikaMetadataKeys.RESOURCE_NAME_KEY) == false /*let the fall through below handle this*/){
       name = defaultField;
       sf = schema.getFieldOrNull(name);
     }
@@ -201,7 +202,7 @@ public class SolrContentHandler extends 
     // ExtractingDocumentLoader.load(). You shouldn't have to define a mapping for this
     // field just because you specified a resource.name parameter to the handler, should
     // you?
-    if (sf == null && unknownFieldPrefix.length()==0 && name == Metadata.RESOURCE_NAME_KEY) {
+    if (sf == null && unknownFieldPrefix.length()==0 && name == TikaMetadataKeys.RESOURCE_NAME_KEY) {
       return;
     }
 

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/langid/src/test-files/langid/solr/conf/solrconfig-languageidentifier.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/langid/src/test-files/langid/solr/conf/solrconfig-languageidentifier.xml?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/langid/src/test-files/langid/solr/conf/solrconfig-languageidentifier.xml (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/langid/src/test-files/langid/solr/conf/solrconfig-languageidentifier.xml Tue Mar  6 23:17:08 2012
@@ -31,7 +31,7 @@
         solr.RAMDirectoryFactory is memory based and not persistent. -->
   <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
 
-  <luceneMatchVersion>LUCENE_40</luceneMatchVersion>
+  <luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
 
   <requestHandler name="search" class="solr.SearchHandler" default="true">
      <lst name="defaults">

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/CHANGES.txt?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/CHANGES.txt (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/CHANGES.txt Tue Mar  6 23:17:08 2012
@@ -5,9 +5,12 @@ This file describes changes to the Solr 
 
 Introduction
 ------------
-This module is intended to be used while indexing documents.
-Its purpose is to provide additional on the fly automatically generated fields to the Solr index.
+This module is intended to be used both as an UpdateRequestProcessor while indexing documents and as a set of tokenizers/filters
+to be configured inside schema.xml for use during the analysis phase.
+The purpose of UIMAUpdateRequestProcessor is to provide additional, automatically generated fields to the Solr index on the fly.
 Such fields could be language, concepts, keywords, sentences, named entities, etc.
+UIMA-based tokenizers/filters can be used either inside plain Lucene or as index/query analyzers defined
+inside the schema.xml of a Solr core to create/filter tokens using specific UIMA annotations.
 
  UIMA Dependency
  ---------------

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/build.xml?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/build.xml (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/build.xml Tue Mar  6 23:17:08 2012
@@ -20,9 +20,25 @@
 <project name="solr-uima" default="default">
 
   <description>
-    Solr Integration with UIMA for extracting metadata from arbitrary (text) fields and enrich document with features extracted from UIMA types (language, sentences, concepts, named entities, etc.)
+    Solr Integration with UIMA for extracting metadata from arbitrary (text) fields and enriching documents with features
+    extracted from UIMA types (language, sentences, concepts, named entities, etc.)
   </description>
 
   <import file="../contrib-build.xml"/>
+  
+  <path id="classpath">
+    <pathelement path="${analyzers-uima.jar}"/>
+    <path refid="solr.base.classpath"/>
+  </path>
+
+  <target name="module-jars-to-solr" depends="jar-analyzers-uima">
+    <mkdir dir="${build.dir}/lucene-libs"/>
+    <copy todir="${build.dir}/lucene-libs" preservelastmodified="true" flatten="true" failonerror="true" overwrite="true">
+      <fileset file="${analyzers-uima.jar}"/>
+    </copy>
+  </target>
+
+  <target name="compile-core" depends="jar-analyzers-uima, solr-contrib-build.compile-core"/>
+  <target name="dist" depends="module-jars-to-solr, common-solr.dist"/>
 
 </project>

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java Tue Mar  6 23:17:08 2012
@@ -23,14 +23,16 @@ import org.apache.solr.common.SolrInputD
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.uima.processor.SolrUIMAConfiguration.MapField;
-import org.apache.solr.uima.processor.ae.AEProvider;
-import org.apache.solr.uima.processor.ae.AEProviderFactory;
+import org.apache.lucene.analysis.uima.ae.AEProvider;
+import org.apache.lucene.analysis.uima.ae.AEProviderFactory;
 import org.apache.solr.update.AddUpdateCommand;
 import org.apache.solr.update.processor.UpdateRequestProcessor;
 import org.apache.uima.analysis_engine.AnalysisEngine;
 import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
 import org.apache.uima.jcas.JCas;
 import org.apache.uima.resource.ResourceInitializationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Map;
@@ -42,6 +44,8 @@ import java.util.Map;
  */
 public class UIMAUpdateRequestProcessor extends UpdateRequestProcessor {
 
+  private final Logger log = LoggerFactory.getLogger(UIMAUpdateRequestProcessor.class);
+
   SolrUIMAConfiguration solrUIMAConfiguration;
 
   private AEProvider aeProvider;

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java Tue Mar  6 23:17:08 2012
@@ -25,6 +25,7 @@ import org.apache.solr.request.SolrQuery
 import org.apache.velocity.Template;
 import org.apache.velocity.VelocityContext;
 import org.apache.velocity.app.VelocityEngine;
+import org.apache.velocity.runtime.RuntimeConstants;
 import org.apache.velocity.tools.generic.*;
 
 import java.io.*;
@@ -117,14 +118,14 @@ public class VelocityResponseWriter impl
     if (template_root != null) {
       baseDir = new File(template_root);
     }
-    engine.setProperty(VelocityEngine.FILE_RESOURCE_LOADER_PATH, baseDir.getAbsolutePath());
+    engine.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, baseDir.getAbsolutePath());
     engine.setProperty("params.resource.loader.instance", new SolrParamResourceLoader(request));
     SolrVelocityResourceLoader resourceLoader =
         new SolrVelocityResourceLoader(request.getCore().getSolrConfig().getResourceLoader());
     engine.setProperty("solr.resource.loader.instance", resourceLoader);
 
     // TODO: Externalize Velocity properties
-    engine.setProperty(VelocityEngine.RESOURCE_LOADER, "params,file,solr");
+    engine.setProperty(RuntimeConstants.RESOURCE_LOADER, "params,file,solr");
     String propFile = request.getParams().get("v.properties");
     try {
       if (propFile == null)

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/FSTSynonymFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/FSTSynonymFilterFactory.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/FSTSynonymFilterFactory.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/FSTSynonymFilterFactory.java Tue Mar  6 23:17:08 2012
@@ -157,6 +157,9 @@ final class FSTSynonymFilterFactory exte
   private static TokenizerFactory loadTokenizerFactory(ResourceLoader loader, String cname, Map<String,String> args){
     TokenizerFactory tokFactory = (TokenizerFactory) loader.newInstance(cname);
     tokFactory.init(args);
+    if (tokFactory instanceof ResourceLoaderAware) {
+      ((ResourceLoaderAware) tokFactory).inform(loader);
+    }
     return tokFactory;
   }
 }

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/KuromojiTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/KuromojiTokenizerFactory.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/KuromojiTokenizerFactory.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/KuromojiTokenizerFactory.java Tue Mar  6 23:17:08 2012
@@ -28,8 +28,7 @@ import java.util.Map;
 
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.kuromoji.KuromojiTokenizer;
-import org.apache.lucene.analysis.kuromoji.Segmenter;
-import org.apache.lucene.analysis.kuromoji.Segmenter.Mode;
+import org.apache.lucene.analysis.kuromoji.KuromojiTokenizer.Mode;
 import org.apache.lucene.analysis.kuromoji.dict.UserDictionary;
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.analysis.BaseTokenizerFactory;
@@ -88,7 +87,7 @@ public class KuromojiTokenizerFactory ex
   
   @Override
   public Tokenizer create(Reader input) {
-    return new KuromojiTokenizer(new Segmenter(userDictionary, mode), input);
+    return new KuromojiTokenizer(input, userDictionary, true, mode);
   }
   
   private Mode getMode(Map<String, String> args) {
@@ -96,7 +95,7 @@ public class KuromojiTokenizerFactory ex
     if (mode != null) {
       return Mode.valueOf(mode.toUpperCase(Locale.ENGLISH));
     } else {
-      return Segmenter.DEFAULT_MODE;
+      return KuromojiTokenizer.DEFAULT_MODE;
     }
   }
 }

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/SlowSynonymFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/SlowSynonymFilterFactory.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/SlowSynonymFilterFactory.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/analysis/SlowSynonymFilterFactory.java Tue Mar  6 23:17:08 2012
@@ -171,6 +171,9 @@ final class SlowSynonymFilterFactory ext
   private static TokenizerFactory loadTokenizerFactory(ResourceLoader loader, String cname, Map<String,String> args){
     TokenizerFactory tokFactory = (TokenizerFactory)loader.newInstance( cname );
     tokFactory.init( args );
+    if (tokFactory instanceof ResourceLoaderAware) {
+      ((ResourceLoaderAware) tokFactory).inform(loader);
+    }
     return tokFactory;
   }
 

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java Tue Mar  6 23:17:08 2012
@@ -155,6 +155,9 @@ public class EmbeddedSolrServer extends 
       
       core.execute( handler, req, rsp );
       if( rsp.getException() != null ) {
+        if(rsp.getException() instanceof SolrException) {
+          throw rsp.getException();
+        }
         throw new SolrServerException( rsp.getException() );
       }
       
@@ -219,6 +222,9 @@ public class EmbeddedSolrServer extends 
     catch( IOException iox ) {
       throw iox;
     }
+    catch( SolrException sx ) {
+      throw sx;
+    }
     catch( Exception ex ) {
       throw new SolrServerException( ex );
     }

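Since SolrException is now rethrown as-is instead of being wrapped, embedded callers
can branch on the Solr-level error directly. A minimal caller-side sketch, assuming an
already-constructed EmbeddedSolrServer named "server"; the query string is illustrative:

    try {
      server.query(new SolrQuery("undefinedField:foo"));
    } catch (SolrException e) {
      // Solr-level error surfaces directly, with its code and message preserved.
      System.err.println("Solr error " + e.code() + ": " + e.getMessage());
    } catch (SolrServerException e) {
      // Failures in transport or embedded execution are still wrapped.
      e.printStackTrace();
    }
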
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java Tue Mar  6 23:17:08 2012
@@ -8,6 +8,7 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.common.cloud.CloudState;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkClientConnectionStrategy;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
@@ -40,19 +41,25 @@ public abstract class ElectionContext {
   final ZkNodeProps leaderProps;
   final String id;
   final String leaderPath;
+  String leaderSeqPath;
+  private SolrZkClient zkClient;
   
   public ElectionContext(final String shardZkNodeName,
-      final String electionPath, final String leaderPath, final ZkNodeProps leaderProps) {
+      final String electionPath, final String leaderPath, final ZkNodeProps leaderProps, final SolrZkClient zkClient) {
     this.id = shardZkNodeName;
     this.electionPath = electionPath;
     this.leaderPath = leaderPath;
     this.leaderProps = leaderProps;
+    this.zkClient = zkClient;
   }
   
+  public void cancelElection() throws InterruptedException, KeeperException {
+    zkClient.delete(leaderSeqPath, -1, true);
+  }
   // the given core may or may not be null - if you need access to the current core, you must pass
   // the core container and core name to your context impl - then use this core ref if it is not null
   // else access it from the core container
-  abstract void runLeaderProcess(String leaderSeqPath, boolean weAreReplacement, SolrCore core) throws KeeperException, InterruptedException, IOException;
+  abstract void runLeaderProcess(boolean weAreReplacement) throws KeeperException, InterruptedException, IOException;
 }
 
 class ShardLeaderElectionContextBase extends ElectionContext {
@@ -66,7 +73,7 @@ class ShardLeaderElectionContextBase ext
       final String collection, final String shardZkNodeName, ZkNodeProps props, ZkStateReader zkStateReader) {
     super(shardZkNodeName, ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + "/leader_elect/"
         + shardId, ZkStateReader.getShardLeadersPath(collection, shardId),
-        props);
+        props, zkStateReader.getZkClient());
     this.leaderElector = leaderElector;
     this.zkClient = zkStateReader.getZkClient();
     this.shardId = shardId;
@@ -74,7 +81,7 @@ class ShardLeaderElectionContextBase ext
   }
 
   @Override
-  void runLeaderProcess(String leaderSeqPath, boolean weAreReplacement, SolrCore core)
+  void runLeaderProcess(boolean weAreReplacement)
       throws KeeperException, InterruptedException, IOException {
 
     try {
@@ -109,7 +116,7 @@ final class ShardLeaderElectionContext e
   }
   
   @Override
-  void runLeaderProcess(String leaderSeqPath, boolean weAreReplacement, SolrCore startupCore)
+  void runLeaderProcess(boolean weAreReplacement)
       throws KeeperException, InterruptedException, IOException {
     if (cc != null) {
       String coreName = leaderProps.get(ZkStateReader.CORE_NAME_PROP);
@@ -117,13 +124,12 @@ final class ShardLeaderElectionContext e
       try {
         // the first time we are run, we will get a startupCore - after
         // we will get null and must use cc.getCore
-        if (startupCore == null) {
-          core = cc.getCore(coreName);
-        } else {
-          core = startupCore;
-        }
+     
+        core = cc.getCore(coreName);
+
         if (core == null) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, "Core not found:" + coreName);
+          cancelElection();
+          throw new SolrException(ErrorCode.SERVER_ERROR, "Fatal Error, SolrCore not found:" + coreName + " in " + cc.getCoreNames());
         }
         // should I be leader?
         if (weAreReplacement && !shouldIBeLeader(leaderProps)) {
@@ -131,7 +137,7 @@ final class ShardLeaderElectionContext e
           rejoinLeaderElection(leaderSeqPath, core);
           return;
         }
-        
+
         if (weAreReplacement) {
           if (zkClient.exists(leaderPath, true)) {
             zkClient.delete(leaderPath, -1, true);
@@ -139,44 +145,41 @@ final class ShardLeaderElectionContext e
 //          System.out.println("I may be the new Leader:" + leaderPath
 //              + " - I need to try and sync");
           boolean success = syncStrategy.sync(zkController, core, leaderProps);
-          if (!success) {
-            // TODO: what if no one can be the leader in a loop?
-            // perhaps we look down the list and if no one is active, we
-            // accept leader role anyhow
-            core.getUpdateHandler().getSolrCoreState().doRecovery(core);
-            
+          if (!success && anyoneElseActive()) {
             rejoinLeaderElection(leaderSeqPath, core);
             return;
           } 
         }
         
         // If I am going to be the leader I have to be active
-        
+        // System.out.println("I am leader go active");
         core.getUpdateHandler().getSolrCoreState().cancelRecovery();
         zkController.publish(core.getCoreDescriptor(), ZkStateReader.ACTIVE);
         
       } finally {
-        if (core != null && startupCore == null) {
+        if (core != null ) {
           core.close();
         }
       }
       
     }
     
-    super.runLeaderProcess(leaderSeqPath, weAreReplacement, startupCore);
+    super.runLeaderProcess(weAreReplacement);
   }
 
   private void rejoinLeaderElection(String leaderSeqPath, SolrCore core)
       throws InterruptedException, KeeperException, IOException {
     // remove our ephemeral and re join the election
-   // System.out.println("sync failed, delete our election node:"
-   //     + leaderSeqPath);
+    // System.out.println("sync failed, delete our election node:"
+    // + leaderSeqPath);
+
     zkController.publish(core.getCoreDescriptor(), ZkStateReader.DOWN);
-    zkClient.delete(leaderSeqPath, -1, true);
     
-    core.getUpdateHandler().getSolrCoreState().doRecovery(core);
+    cancelElection();
+    
+    core.getUpdateHandler().getSolrCoreState().doRecovery(cc, core.getName());
     
-    leaderElector.joinElection(this, null);
+    leaderElector.joinElection(this);
   }
   
   private boolean shouldIBeLeader(ZkNodeProps leaderProps) {
@@ -210,6 +213,26 @@ final class ShardLeaderElectionContext e
     return !foundSomeoneElseActive;
   }
   
+  private boolean anyoneElseActive() {
+    CloudState cloudState = zkController.getZkStateReader().getCloudState();
+    Map<String,Slice> slices = cloudState.getSlices(this.collection);
+    Slice slice = slices.get(shardId);
+    Map<String,ZkNodeProps> shards = slice.getShards();
+
+    for (Map.Entry<String,ZkNodeProps> shard : shards.entrySet()) {
+      String state = shard.getValue().get(ZkStateReader.STATE_PROP);
+
+      
+      if ((state.equals(ZkStateReader.ACTIVE))
+          && cloudState.liveNodesContain(shard.getValue().get(
+              ZkStateReader.NODE_NAME_PROP))) {
+        return true;
+      }
+    }
+    
+    return false;
+  }
+  
 }
 
 final class OverseerElectionContext extends ElectionContext {
@@ -218,13 +241,13 @@ final class OverseerElectionContext exte
   private final ZkStateReader stateReader;
 
   public OverseerElectionContext(final String zkNodeName, SolrZkClient zkClient, ZkStateReader stateReader) {
-    super(zkNodeName, "/overseer_elect", "/overseer_elect/leader", null);
+    super(zkNodeName, "/overseer_elect", "/overseer_elect/leader", null, stateReader.getZkClient());
     this.zkClient = zkClient;
     this.stateReader = stateReader;
   }
 
   @Override
-  void runLeaderProcess(String leaderSeqPath, boolean weAreReplacement, SolrCore firstCore) throws KeeperException, InterruptedException {
+  void runLeaderProcess(boolean weAreReplacement) throws KeeperException, InterruptedException {
     
     final String id = leaderSeqPath.substring(leaderSeqPath.lastIndexOf("/")+1);
     ZkNodeProps myProps = new ZkNodeProps("id", id);

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java Tue Mar  6 23:17:08 2012
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
  * Leader Election process. This class contains the logic by which a
  * leader is chosen. First call * {@link #setup(ElectionContext)} to ensure
  * the election process is init'd. Next call
- * {@link #joinElection(ElectionContext, SolrCore)} to start the leader election.
+ * {@link #joinElection(ElectionContext)} to start the leader election.
  * 
  * The implementation follows the classic ZooKeeper recipe of creating an
  * ephemeral, sequential node for each candidate and then looking at the set
@@ -80,13 +80,12 @@ public  class LeaderElector {
    * @param seq
    * @param context 
    * @param replacement has someone else been the leader already?
-   * @param core 
    * @throws KeeperException
    * @throws InterruptedException
    * @throws IOException 
    * @throws UnsupportedEncodingException
    */
-  private void checkIfIamLeader(final String leaderSeqPath, final int seq, final ElectionContext context, boolean replacement, SolrCore core) throws KeeperException,
+  private void checkIfIamLeader(final int seq, final ElectionContext context, boolean replacement) throws KeeperException,
       InterruptedException, IOException {
     // get all other numbers...
     final String holdElectionPath = context.electionPath + ELECTION_NODE;
@@ -95,7 +94,7 @@ public  class LeaderElector {
     sortSeqs(seqs);
     List<Integer> intSeqs = getSeqs(seqs);
     if (seq <= intSeqs.get(0)) {
-      runIamLeaderProcess(leaderSeqPath, context, replacement, core);
+      runIamLeaderProcess(context, replacement);
     } else {
       // I am not the leader - watch the node below me
       int i = 1;
@@ -119,7 +118,7 @@ public  class LeaderElector {
               public void process(WatchedEvent event) {
                 // am I the next leader?
                 try {
-                  checkIfIamLeader(leaderSeqPath, seq, context, true, null);
+                  checkIfIamLeader(seq, context, true);
                 } catch (InterruptedException e) {
                   // Restore the interrupted status
                   Thread.currentThread().interrupt();
@@ -137,16 +136,15 @@ public  class LeaderElector {
       } catch (KeeperException e) {
         // we couldn't set our watch - the node before us may already be down?
         // we need to check if we are the leader again
-        checkIfIamLeader(leaderSeqPath, seq, context, true, null);
+        checkIfIamLeader(seq, context, true);
       }
     }
   }
 
   // TODO: get this core param out of here
-  protected void runIamLeaderProcess(String leaderSeqPath, final ElectionContext context, boolean weAreReplacement, SolrCore core) throws KeeperException,
+  protected void runIamLeaderProcess(final ElectionContext context, boolean weAreReplacement) throws KeeperException,
       InterruptedException, IOException {
-
-    context.runLeaderProcess(leaderSeqPath, weAreReplacement, core);
+    context.runLeaderProcess(weAreReplacement);
   }
   
   /**
@@ -207,7 +205,7 @@ public  class LeaderElector {
    * @throws IOException 
    * @throws UnsupportedEncodingException
    */
-  public int joinElection(ElectionContext context, SolrCore core) throws KeeperException, InterruptedException, IOException {
+  public int joinElection(ElectionContext context) throws KeeperException, InterruptedException, IOException {
     final String shardsElectZkPath = context.electionPath + LeaderElector.ELECTION_NODE;
     
     long sessionId = zkClient.getSolrZooKeeper().getSessionId();
@@ -219,6 +217,7 @@ public  class LeaderElector {
       try {
         leaderSeqPath = zkClient.create(shardsElectZkPath + "/" + id + "-n_", null,
             CreateMode.EPHEMERAL_SEQUENTIAL, false);
+        context.leaderSeqPath = leaderSeqPath;
         cont = false;
       } catch (ConnectionLossException e) {
         // we don't know if we made our node or not...
@@ -249,7 +248,7 @@ public  class LeaderElector {
       }
     }
     int seq = getSeq(leaderSeqPath);
-    checkIfIamLeader(leaderSeqPath, seq, context, false, core);
+    checkIfIamLeader(seq, context, false);
     
     return seq;
   }

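The election entry points no longer take a SolrCore or the leader sequence path; the
ElectionContext now records leaderSeqPath itself when the ephemeral node is created.
A minimal caller-side sketch of the narrowed API, assuming an already-constructed
LeaderElector and ElectionContext:

    leaderElector.setup(context);                  // ensure the election znodes exist
    int seq = leaderElector.joinElection(context); // was joinElection(context, core)
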
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/NodeStateWatcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/NodeStateWatcher.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/NodeStateWatcher.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/NodeStateWatcher.java Tue Mar  6 23:17:08 2012
@@ -18,6 +18,7 @@ package org.apache.solr.cloud;
  */
 
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -42,6 +43,8 @@ public class NodeStateWatcher implements
   public static interface NodeStateChangeListener {
     void coreChanged(String nodeName, Set<CoreState> states)
         throws KeeperException, InterruptedException;
+    void coreDeleted(String nodeName, Collection<CoreState> states)
+    throws KeeperException, InterruptedException;
   }
 
   private final SolrZkClient zkClient;
@@ -104,6 +107,19 @@ public class NodeStateWatcher implements
           }
         }
         
+        HashMap<String, CoreState> deletedCores = new HashMap<String, CoreState>();
+        for(CoreState state: currentState) {
+          deletedCores.put(state.getCoreNodeName(), state);
+        }
+
+        for(CoreState state: stateList) {
+          deletedCores.remove(state.getCoreNodeName());
+        }
+
+        if (deletedCores.size() > 0) {
+          listener.coreDeleted(nodeName, deletedCores.values());
+        }
+        
         currentState = Collections.unmodifiableSet(newState);
 
         if (modifiedCores.size() > 0) {

Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/Overseer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/Overseer.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/Overseer.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/Overseer.java Tue Mar  6 23:17:08 2012
@@ -57,7 +57,7 @@ public class Overseer implements NodeSta
   private static final int STATE_UPDATE_DELAY = 500;  // delay between cloud state updates
 
   static enum Op {
-    LeaderChange, StateChange; 
+    LeaderChange, StateChange, CoreDeleted; 
   }
 
   private final class CloudStateUpdateRequest {
@@ -135,6 +135,9 @@ public class Overseer implements NodeSta
                         (String) request.args[0], (CoreState) request.args[1]);
                     break;
 
+                  case CoreDeleted:
+                    cloudState = removeCore(cloudState, (String) request.args[0], (String) request.args[1]);
+                    break;
                   }
                 }
 
@@ -168,7 +171,7 @@ public class Overseer implements NodeSta
       
       private boolean amILeader() {
         try {
-          ZkNodeProps props = ZkNodeProps.load(zkClient.getData("/overseer_elect/leader", null, null, false));
+          ZkNodeProps props = ZkNodeProps.load(zkClient.getData("/overseer_elect/leader", null, null, true));
           if(myId.equals(props.get("id"))) {
             return true;
           }
@@ -294,7 +297,6 @@ public class Overseer implements NodeSta
       
       private CloudState setShardLeader(CloudState state, String collection, String sliceName, String leaderUrl) {
         
-        boolean updated = false;
         final Map<String, Map<String, Slice>> newStates = new LinkedHashMap<String,Map<String,Slice>>();
         newStates.putAll(state.getCollectionStates());
         
@@ -314,32 +316,49 @@ public class Overseer implements NodeSta
             Map<String, String> newShardProps = new LinkedHashMap<String,String>();
             newShardProps.putAll(shard.getValue().getProperties());
             
-            String wasLeader = newShardProps.remove(ZkStateReader.LEADER_PROP);  //clean any previously existed flag
-
+            newShardProps.remove(ZkStateReader.LEADER_PROP);  //clean any previously existed flag
+            
             ZkCoreNodeProps zkCoreNodeProps = new ZkCoreNodeProps(new ZkNodeProps(newShardProps));
             if(leaderUrl!=null && leaderUrl.equals(zkCoreNodeProps.getCoreUrl())) {
               newShardProps.put(ZkStateReader.LEADER_PROP,"true");
-              if (wasLeader == null) {
-                updated = true;
-              }
-            } else {
-              if (wasLeader != null) {
-                updated = true;
-              }
             }
             newShards.put(shard.getKey(), new ZkNodeProps(newShardProps));
           }
           Slice slice = new Slice(sliceName, newShards);
           slices.put(sliceName, slice);
         }
-        if (updated) {
-          return new CloudState(state.getLiveNodes(), newStates);
-        } else {
-          return state;
-        }
+        return new CloudState(state.getLiveNodes(), newStates);
       }
-
-    }
+      
+      /*
+       * Remove core from cloudstate
+       */
+      private CloudState removeCore(final CloudState cloudState, final String collection, final String coreNodeName) {
+        final LinkedHashMap<String, Map<String, Slice>> newStates = new LinkedHashMap<String,Map<String,Slice>>();
+        for(String collectionName: cloudState.getCollections()) {
+          if(collection.equals(collectionName)) {
+            Map<String, Slice> slices = cloudState.getSlices(collection);
+            LinkedHashMap<String, Slice> newSlices = new LinkedHashMap<String, Slice>();
+            for(Slice slice: slices.values()) {
+              if(slice.getShards().containsKey(coreNodeName)) {
+                LinkedHashMap<String, ZkNodeProps> newShards = new LinkedHashMap<String, ZkNodeProps>();
+                newShards.putAll(slice.getShards());
+                newShards.remove(coreNodeName);
+                Slice newSlice = new Slice(slice.getName(), newShards);
+                newSlices.put(slice.getName(), newSlice);
+              } else {
+                newSlices.put(slice.getName(), slice);
+              }
+            }
+            newStates.put(collectionName, newSlices);
+          } else {
+            newStates.put(collectionName, cloudState.getSlices(collectionName));
+          }
+        }
+        CloudState newState = new CloudState(cloudState.getLiveNodes(), newStates);
+        return newState;
+     }
+  }
   
   public Overseer(final SolrZkClient zkClient, final ZkStateReader reader, String id) throws KeeperException, InterruptedException {
     log.info("Constructing new Overseer id=" + id);
@@ -462,7 +481,6 @@ public class Overseer implements NodeSta
       ShardLeaderWatcher watcher = watches.remove(shardId);
       if (watcher != null) {
         watcher.close();
-        announceLeader(collection, shardId, new ZkCoreNodeProps(new ZkNodeProps()));  //removes loeader for shard
       }
     }
     
@@ -494,9 +512,9 @@ public class Overseer implements NodeSta
                 try {
                     List<String> liveNodes = zkClient.getChildren(
                         ZkStateReader.LIVE_NODES_ZKNODE, this, true);
-                    Set<String> liveNodesSet = new HashSet<String>();
-                    liveNodesSet.addAll(liveNodes);
-                    processLiveNodesChanged(nodeStateWatches.keySet(), liveNodes);
+                    synchronized (nodeStateWatches) {
+                      processLiveNodesChanged(nodeStateWatches.keySet(), liveNodes);
+                    }
                 } catch (KeeperException e) {
                   if (e.code() == KeeperException.Code.SESSIONEXPIRED
                       || e.code() == KeeperException.Code.CONNECTIONLOSS) {
@@ -528,7 +546,9 @@ public class Overseer implements NodeSta
     
     Set<String> downNodes = complement(oldLiveNodes, liveNodes);
     for(String node: downNodes) {
-      NodeStateWatcher watcher = nodeStateWatches.remove(node);
+      synchronized (nodeStateWatches) {
+        NodeStateWatcher watcher = nodeStateWatches.remove(node);
+      }
       log.debug("Removed NodeStateWatcher for node:" + node);
     }
   }
@@ -565,7 +585,15 @@ public class Overseer implements NodeSta
       fifo.add(new CloudStateUpdateRequest(Op.StateChange, nodeName, state));
     }
   }
-  
+
+  @Override
+  public void coreDeleted(String nodeName, Collection<CoreState> states)
+      throws KeeperException, InterruptedException {
+    for (CoreState state : states) {
+      fifo.add(new CloudStateUpdateRequest(Op.CoreDeleted, state.getCollectionName(), state.getCoreNodeName()));
+    }
+  }
+
   public static void createClientNodes(SolrZkClient zkClient, String nodeName) throws KeeperException, InterruptedException {
     final String node = STATES_NODE + "/" + nodeName;
     if (log.isInfoEnabled()) {
@@ -583,4 +611,4 @@ public class Overseer implements NodeSta
     fifo.add(new CloudStateUpdateRequest(Op.LeaderChange, collection, shardId, coreUrl));
   }
   
-}
\ No newline at end of file
+}

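The removeCore() helper added above never mutates the existing CloudState: it rebuilds the nested collection -> slice -> shard maps, sharing every untouched entry and copying only the path down to the removed core node. A minimal, JDK-only sketch of that copy-on-write rebuild (plain Strings and nested Maps stand in for CloudState/Slice/ZkNodeProps; the names are illustrative, not Solr API):

import java.util.LinkedHashMap;
import java.util.Map;

public class CopyOnWriteRemoveSketch {
  // collections -> slices -> coreNodeName -> props (stand-ins for CloudState/Slice/ZkNodeProps)
  static Map<String, Map<String, Map<String, String>>> removeCore(
      Map<String, Map<String, Map<String, String>>> state,
      String collection, String coreNodeName) {
    Map<String, Map<String, Map<String, String>>> newState =
        new LinkedHashMap<String, Map<String, Map<String, String>>>();
    for (Map.Entry<String, Map<String, Map<String, String>>> coll : state.entrySet()) {
      if (!coll.getKey().equals(collection)) {
        newState.put(coll.getKey(), coll.getValue());  // untouched collections are shared, not copied
        continue;
      }
      Map<String, Map<String, String>> newSlices = new LinkedHashMap<String, Map<String, String>>();
      for (Map.Entry<String, Map<String, String>> slice : coll.getValue().entrySet()) {
        if (slice.getValue().containsKey(coreNodeName)) {
          Map<String, String> newShards = new LinkedHashMap<String, String>(slice.getValue());
          newShards.remove(coreNodeName);              // drop only the deleted core's entry
          newSlices.put(slice.getKey(), newShards);
        } else {
          newSlices.put(slice.getKey(), slice.getValue());
        }
      }
      newState.put(coll.getKey(), newSlices);
    }
    return newState;
  }
}

Because unmodified sub-maps are reused rather than deep-copied, publishing the new CloudState stays cheap even for large clusters.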
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java Tue Mar  6 23:17:08 2012
@@ -37,6 +37,7 @@ import org.apache.solr.common.cloud.ZkCo
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.RequestHandlers.LazyRequestHandlerWrapper;
 import org.apache.solr.core.SolrCore;
@@ -44,6 +45,8 @@ import org.apache.solr.handler.Replicati
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.request.SolrRequestInfo;
+import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.PeerSync;
 import org.apache.solr.update.UpdateLog;
@@ -69,14 +72,14 @@ public class RecoveryStrategy extends Th
   private ZkStateReader zkStateReader;
   private volatile String coreName;
   private int retries;
-  private SolrCore core;
   private boolean recoveringAfterStartup;
+  private CoreContainer cc;
   
-  public RecoveryStrategy(SolrCore core) {
-    this.core = core;
-    this.coreName = core.getName();
+  public RecoveryStrategy(CoreContainer cc, String name) {
+    this.cc = cc;
+    this.coreName = name;
     setName("RecoveryThread");
-    zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+    zkController = cc.getZkController();
     zkStateReader = zkController.getZkStateReader();
     baseUrl = zkController.getBaseUrl();
     coreZkNodeName = zkController.getNodeName() + "_" + coreName;
@@ -103,9 +106,6 @@ public class RecoveryStrategy extends Th
   
   private void replicate(String nodeName, SolrCore core, ZkNodeProps leaderprops, String baseUrl)
       throws SolrServerException, IOException {
-    // start buffer updates to tran log
-    // and do recovery - either replay via realtime get (eventually)
-    // or full index replication
    
     String leaderBaseUrl = leaderprops.get(ZkStateReader.BASE_URL_PROP);
     ZkCoreNodeProps leaderCNodeProps = new ZkCoreNodeProps(leaderprops);
@@ -183,18 +183,42 @@ public class RecoveryStrategy extends Th
     prepCmd.setCoreNodeName(coreZkNodeName);
     prepCmd.setState(ZkStateReader.RECOVERING);
     prepCmd.setCheckLive(true);
-    prepCmd.setPauseFor(4000);
+    prepCmd.setPauseFor(6000);
     
     server.request(prepCmd);
     server.shutdown();
   }
-  
+
   @Override
   public void run() {
+    SolrCore core = cc.getCore(coreName);
+    if (core == null) {
+      SolrException.log(log, "SolrCore not found - cannot recover:" + coreName);
+      return;
+    }
+
+    // set request info for logging
+    try {
+      SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
+      SolrQueryResponse rsp = new SolrQueryResponse();
+      SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
+
+      log.info("Starting recovery process. recoveringAfterStartup=" + recoveringAfterStartup);
+
+      doRecovery(core);
+    } finally {
+      if (core != null) core.close();
+      SolrRequestInfo.clearRequestInfo();
+    }
+  }
+
+  // TODO: perhaps make this grab a new core each time through the loop to handle core reloads?
+  public void doRecovery(SolrCore core) {
     boolean replayed = false;
-    boolean succesfulRecovery = false;
+    boolean successfulRecovery = false;
 
-    UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
+    UpdateLog ulog;
+    ulog = core.getUpdateHandler().getUpdateLog();
     if (ulog == null) {
       SolrException.log(log, "No UpdateLog found - cannot recover");
       recoveryFailed(core, zkController, baseUrl, coreZkNodeName,
@@ -202,6 +226,7 @@ public class RecoveryStrategy extends Th
       return;
     }
 
+
     List<Long> startingRecentVersions;
     UpdateLog.RecentUpdates startingRecentUpdates = ulog.getRecentUpdates();
     try {
@@ -221,14 +246,14 @@ public class RecoveryStrategy extends Th
         if (startingRecentVersions.get(oldIdx) == firstStartingVersion) break;
       }
 
-      if (oldIdx < startingRecentVersions.size()) {
-        log.info("####### Found new versions added after startup: num=" + (startingRecentVersions.size()-oldIdx));
+      if (oldIdx > 0) {
+        log.info("####### Found new versions added after startup: num=" + oldIdx);
+        log.info("###### currentVersions=" + startingRecentVersions);
       }
-      
+
       log.info("###### startupVersions=" + reallyStartingVersions);
-      log.info("###### currentVersions=" + startingRecentVersions);
     }
-    
+
     if (recoveringAfterStartup) {
       // if we're recovering after startup (i.e. we have been down), then we need to know what the last versions were
       // when we went down.
@@ -237,9 +262,9 @@ public class RecoveryStrategy extends Th
 
     boolean firstTime = true;
 
-    while (!succesfulRecovery && !close && !isInterrupted()) { // don't use interruption or it will close channels though
+    while (!successfulRecovery && !close && !isInterrupted()) { // don't use interruption or it will close channels though
       try {
-
+        // first thing we just try to sync
         zkController.publish(core.getCoreDescriptor(), ZkStateReader.RECOVERING);
 
         CloudDescriptor cloudDesc = core.getCoreDescriptor()
@@ -257,8 +282,10 @@ public class RecoveryStrategy extends Th
 
         // first thing we just try to sync
         if (firstTime) {
-          firstTime = false;    // only try sync the first time through the loop
+          firstTime = false; // only try sync the first time through the loop
           log.info("Attempting to PeerSync from " + leaderUrl + " recoveringAfterStartup="+recoveringAfterStartup);
+          // System.out.println("Attempting to PeerSync from " + leaderUrl
+          // + " i am:" + zkController.getNodeName());
           PeerSync peerSync = new PeerSync(core,
               Collections.singletonList(leaderUrl), ulog.numRecordsToKeep);
           peerSync.setStartingVersions(startingRecentVersions);
@@ -267,42 +294,63 @@ public class RecoveryStrategy extends Th
             SolrQueryRequest req = new LocalSolrQueryRequest(core,
                 new ModifiableSolrParams());
             core.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
-            log.info("Sync Recovery was succesful - registering as Active");
+            log.info("Sync Recovery was successful - registering as Active");
+            // System.out
+            // .println("Sync Recovery was successful - registering as Active "
+            // + zkController.getNodeName());
+
+            // solrcloud_debug
+            // try {
+            // RefCounted<SolrIndexSearcher> searchHolder =
+            // core.getNewestSearcher(false);
+            // SolrIndexSearcher searcher = searchHolder.get();
+            // try {
+            // System.out.println(core.getCoreDescriptor().getCoreContainer().getZkController().getNodeName()
+            // + " synched "
+            // + searcher.search(new MatchAllDocsQuery(), 1).totalHits);
+            // } finally {
+            // searchHolder.decref();
+            // }
+            // } catch (Exception e) {
+            //
+            // }
+
             // sync success - register as active and return
             zkController.publishAsActive(baseUrl, core.getCoreDescriptor(),
                 coreZkNodeName, coreName);
-            succesfulRecovery = true;
+            successfulRecovery = true;
             close = true;
             return;
           }
 
           log.info("Sync Recovery was not successful - trying replication");
         }
+        //System.out.println("Sync Recovery was not successful - trying replication");
 
         log.info("Begin buffering updates");
         ulog.bufferUpdates();
         replayed = false;
-        
+
         try {
-          
+
           replicate(zkController.getNodeName(), core,
               leaderprops, leaderUrl);
-          
+
           replay(ulog);
           replayed = true;
-          
-          log.info("Recovery was succesful - registering as Active");
+
+          log.info("Recovery was successful - registering as Active");
           // if there are pending recovery requests, don't advert as active
           zkController.publishAsActive(baseUrl, core.getCoreDescriptor(),
               coreZkNodeName, coreName);
           close = true;
-          succesfulRecovery = true;
+          successfulRecovery = true;
         } catch (InterruptedException e) {
           Thread.currentThread().interrupt();
           log.warn("Recovery was interrupted", e);
           retries = INTERRUPTED;
         } catch (Throwable t) {
-          SolrException.log(log, "Error while trying to recover", t);
+          log.error("Error while trying to recover", t);
         } finally {
           if (!replayed) {
             try {
@@ -311,36 +359,36 @@ public class RecoveryStrategy extends Th
               SolrException.log(log, "", t);
             }
           }
-          
+
         }
-        
+
       } catch (Throwable t) {
-        SolrException.log(log, "Error while trying to recover", t);
+        log.error("Error while trying to recover.", t);
       }
-      
-      if (!succesfulRecovery) {
+
+      if (!successfulRecovery) {
         // lets pause for a moment and we need to try again...
         // TODO: we don't want to retry for some problems?
         // Or do a fall off retry...
         try {
-          
-          SolrException.log(log, "Recovery failed - trying again...");
+
+          log.error("Recovery failed - trying again...");
           retries++;
           if (retries >= MAX_RETRIES) {
             if (retries == INTERRUPTED) {
-              
+
             } else {
-              // TODO: for now, give up after X tries - should we do more?
+              log.error("Recovery failed - max retries exceeded.");
               recoveryFailed(core, zkController, baseUrl, coreZkNodeName,
                   core.getCoreDescriptor());
             }
             break;
           }
-          
+
         } catch (Exception e) {
           SolrException.log(log, "", e);
         }
-        
+
         try {
           Thread.sleep(Math.min(START_TIMEOUT * retries, 60000));
         } catch (InterruptedException e) {
@@ -349,10 +397,10 @@ public class RecoveryStrategy extends Th
           retries = INTERRUPTED;
         }
       }
-      
-      log.info("Finished recovery process");
-      
+
     }
+    log.info("Finished recovery process");
+
   }
 
   private Future<RecoveryInfo> replay(UpdateLog ulog)

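The reworked run()/doRecovery() above follows a fixed shape: publish RECOVERING, attempt a cheap PeerSync against the leader on the first pass only, and if that fails fall back to buffering updates, replicating the full index, and replaying the buffer, retrying with a capped back-off until recovery succeeds, the strategy is closed, or MAX_RETRIES is hit. A condensed sketch of that control flow under stated assumptions (Step, trySync and replicateAndReplay are placeholders for the calls above, not real Solr methods; maxRetries/startTimeoutMs mirror MAX_RETRIES/START_TIMEOUT):

public class RecoveryLoopSketch {
  interface Step { boolean run() throws Exception; }  // placeholder for the real recovery calls

  // Shape of RecoveryStrategy.doRecovery(): sync once, otherwise replicate+replay, retry with back-off.
  static boolean recover(Step trySync, Step replicateAndReplay,
                         int maxRetries, long startTimeoutMs) throws InterruptedException {
    boolean firstTime = true;
    int retries = 0;
    while (true) {
      try {
        if (firstTime) {
          firstTime = false;                // PeerSync is only attempted on the first pass
          if (trySync.run()) return true;   // cheap path: versions matched, register as active
        }
        if (replicateAndReplay.run()) return true;  // full replication from the leader, then replay buffered updates
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return false;
      } catch (Exception e) {
        // fall through and retry
      }
      if (++retries >= maxRetries) return false;               // give up and mark recovery failed
      Thread.sleep(Math.min(startTimeoutMs * retries, 60000)); // capped linear back-off, as in the patch
    }
  }
}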
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java Tue Mar  6 23:17:08 2012
@@ -45,17 +45,21 @@ public class SolrZkServer {
   
   String zkRun;
   String zkHost;
-  String solrHome;
+
   String solrPort;
   Properties props;
   SolrZkServerProps zkProps;
 
   private Thread zkThread;  // the thread running a zookeeper server, only if zkRun is set
 
-  public SolrZkServer(String zkRun, String zkHost, String solrHome, String solrPort) {
+  private String dataHome;
+  private String confHome;
+
+  public SolrZkServer(String zkRun, String zkHost, String dataHome, String confHome, String solrPort) {
     this.zkRun = zkRun;
     this.zkHost = zkHost;
-    this.solrHome = solrHome;
+    this.dataHome = dataHome;
+    this.confHome = confHome;
     this.solrPort = solrPort;
   }
 
@@ -74,13 +78,13 @@ public class SolrZkServer {
       zkProps = new SolrZkServerProps();
       // set default data dir
       // TODO: use something based on IP+port???  support ensemble all from same solr home?
-      zkProps.setDataDir(solrHome + '/' + "zoo_data");
+      zkProps.setDataDir(dataHome);
       zkProps.zkRun = zkRun;
       zkProps.solrPort = solrPort;
     }
     
     try {
-      props = SolrZkServerProps.getProperties(solrHome + '/' + "zoo.cfg");
+      props = SolrZkServerProps.getProperties(confHome + '/' + "zoo.cfg");
       SolrZkServerProps.injectServers(props, zkRun, zkHost);
       zkProps.parseProperties(props);
       if (zkProps.getClientPortAddress() == null) {

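The change above splits the single solrHome argument into separate dataHome and confHome, so the embedded ZooKeeper's data directory no longer has to live under the directory that holds zoo.cfg. A minimal usage sketch against the new constructor signature; only the parameter order comes from the patch, while the parseConfig()/start() lifecycle calls are assumed from the existing class and all paths, hosts and ports are hypothetical:

import org.apache.solr.cloud.SolrZkServer;

public class EmbeddedZkExample {
  public static void main(String[] args) throws Exception {
    // hypothetical values; dataHome and confHome may now point at different locations
    SolrZkServer zkServer = new SolrZkServer(
        "zkRun",              // zkRun: run an embedded ZooKeeper server
        "localhost:9983",     // zkHost
        "/var/solr/zoo_data", // dataHome: where ZooKeeper keeps its data
        "/var/solr/conf",     // confHome: directory containing zoo.cfg
        "8983");              // solrPort, used to derive the ZooKeeper client port
    zkServer.parseConfig();   // assumed lifecycle, as used by CoreContainer
    zkServer.start();
  }
}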
Modified: lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java?rev=1297785&r1=1297784&r2=1297785&view=diff
==============================================================================
--- lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java (original)
+++ lucene/dev/branches/lucene3795_lsp_spatial_module/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java Tue Mar  6 23:17:08 2012
@@ -23,10 +23,13 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
+import org.apache.commons.httpclient.params.HttpMethodParams;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.RequestRecovery;
-import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.CloudState;
 import org.apache.solr.common.cloud.Slice;
@@ -37,12 +40,42 @@ import org.apache.solr.common.params.Cor
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.component.HttpShardHandlerFactory;
+import org.apache.solr.handler.component.ShardHandler;
+import org.apache.solr.handler.component.ShardRequest;
+import org.apache.solr.handler.component.ShardResponse;
 import org.apache.solr.update.PeerSync;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SyncStrategy {
   protected final Logger log = LoggerFactory.getLogger(getClass());
+
+  private HttpShardHandlerFactory shardHandlerFactory;
+
+  private ShardHandler shardHandler;
+  
+  private static MultiThreadedHttpConnectionManager mgr = new MultiThreadedHttpConnectionManager();
+  private static HttpClient client = new HttpClient(mgr);
+  static {
+    mgr.getParams().setDefaultMaxConnectionsPerHost(20);
+    mgr.getParams().setMaxTotalConnections(10000);
+    mgr.getParams().setConnectionTimeout(30000);
+    mgr.getParams().setSoTimeout(30000);
+
+    // prevent retries  (note: this didn't work when set on mgr.. needed to be set on client)
+    DefaultHttpMethodRetryHandler retryhandler = new DefaultHttpMethodRetryHandler(0, false);
+    client.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, retryhandler);
+  }
+  
+  public SyncStrategy() {
+    shardHandlerFactory = new HttpShardHandlerFactory();
+    shardHandler = shardHandlerFactory.getShardHandler(client);
+  }
+  
+  private static class SyncShardRequest extends ShardRequest {
+    String coreName;
+  }
   
   public boolean sync(ZkController zkController, SolrCore core,
       ZkNodeProps leaderProps) {
@@ -51,6 +84,10 @@ public class SyncStrategy {
     
     // solrcloud_debug
     // System.out.println("SYNC UP");
+    if (core.getUpdateHandler().getUpdateLog() == null) {
+      log.error("No UpdateLog found - cannot sync");
+      return false;
+    }
     boolean success = syncReplicas(zkController, core, leaderProps);
     return success;
   }
@@ -156,7 +193,7 @@ public class SyncStrategy {
     }
     
  
-    PeerSync peerSync = new PeerSync(core, syncWith, 1000);
+    PeerSync peerSync = new PeerSync(core, syncWith, core.getUpdateHandler().getUpdateLog().numRecordsToKeep);
     return peerSync.sync();
   }
   
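The next hunk replaces the serial per-replica QueryRequest loop with ShardHandler submissions: each replica's /get sync request is submitted up front, responses are drained with takeCompletedOrError(), and on the first failure a REQUESTRECOVERY command is issued for that replica's core and the remaining requests are cancelled. A rough JDK-only analogue of that submit/drain/cancel-on-failure pattern (ExecutorCompletionService stands in for ShardHandler; nothing below is Solr API):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

public class FanOutSyncSketch {
  // Submit one "sync" task per replica, drain results as they complete,
  // and stop early (cancelling the rest) as soon as one replica fails.
  static boolean syncAll(List<Callable<Boolean>> syncRequests) throws InterruptedException {
    ExecutorService pool = Executors.newCachedThreadPool();
    try {
      CompletionService<Boolean> completion = new ExecutorCompletionService<Boolean>(pool);
      List<Future<Boolean>> futures = new ArrayList<Future<Boolean>>();
      for (Callable<Boolean> req : syncRequests) {
        futures.add(completion.submit(req));        // analogous to shardHandler.submit(...)
      }
      for (int i = 0; i < futures.size(); i++) {
        Future<Boolean> done = completion.take();   // analogous to takeCompletedOrError()
        boolean success;
        try {
          success = done.get();
        } catch (ExecutionException e) {
          success = false;
        }
        if (!success) {
          for (Future<Boolean> f : futures) f.cancel(true);  // analogous to shardHandler.cancelAll()
          return false;  // caller would now ask the failed replica to recover
        }
      }
      return true;
    } finally {
      pool.shutdownNow();
    }
  }
}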
@@ -180,44 +217,68 @@ public class SyncStrategy {
     ZkCoreNodeProps zkLeader = new ZkCoreNodeProps(leaderProps);
     for (ZkCoreNodeProps node : nodes) {
       try {
-        // TODO: do we first everyone register as sync phase? get the overseer
-        // to do it?
-        // TODO: this should be done in parallel
-        QueryRequest qr = new QueryRequest(params("qt", "/get", "getVersions",
-            Integer.toString(1000), "sync", zkLeader.getCoreUrl(), "distrib",
-            "false"));
-        CommonsHttpSolrServer server = new CommonsHttpSolrServer(
-            node.getCoreUrl());
-        server.setConnectionTimeout(15000);
-        server.setSoTimeout(15000);
-        //System.out.println("ask " + node.getCoreUrl() + " to sync");
-        NamedList rsp = server.request(qr);
-        //System.out.println("response about syncing to leader:" + rsp + " node:"
-        //    + node.getCoreUrl() + " me:" + zkController.getBaseUrl());
-        boolean success = (Boolean) rsp.get("sync");
-        //System.out.println("success:" + success);
-        if (!success) {
-         // System.out
-         //     .println("try and ask " + node.getCoreUrl() + " to recover");
-          log.info("try and ask " + node.getCoreUrl() + " to recover");
-          try {
-            server = new CommonsHttpSolrServer(node.getBaseUrl());
-            server.setSoTimeout(5000);
-            server.setConnectionTimeout(5000);
-            
-            RequestRecovery recoverRequestCmd = new RequestRecovery();
-            recoverRequestCmd.setAction(CoreAdminAction.REQUESTRECOVERY);
-            recoverRequestCmd.setCoreName(node.getCoreName());
-            
-            server.request(recoverRequestCmd);
-          } catch (Exception e) {
-            log.info("Could not tell a replica to recover", e);
-          }
-        }
+//         System.out
+//             .println("try and ask " + node.getCoreUrl() + " to sync");
+        log.info("try and ask " + node.getCoreUrl() + " to sync");
+        requestSync(zkLeader.getCoreUrl(), node.getCoreName());
+
       } catch (Exception e) {
         SolrException.log(log, "Error syncing replica to leader", e);
       }
     }
+    
+    
+    for(;;) {
+      ShardResponse srsp = shardHandler.takeCompletedOrError();
+      if (srsp == null) break;
+      boolean success = handleResponse(srsp);
+      //System.out.println("got response:" + success);
+      if (!success) {
+         try {
+           log.info("Sync failed - asking replica to recover.");
+           //System.out.println("Sync failed - asking replica to recover.");
+           RequestRecovery recoverRequestCmd = new RequestRecovery();
+           recoverRequestCmd.setAction(CoreAdminAction.REQUESTRECOVERY);
+           recoverRequestCmd.setCoreName(((SyncShardRequest)srsp.getShardRequest()).coreName);
+           
+           CommonsHttpSolrServer server = new CommonsHttpSolrServer(zkLeader.getBaseUrl());
+           server.request(recoverRequestCmd);
+         } catch (Exception e) {
+           log.info("Could not tell a replica to recover", e);
+         }
+         shardHandler.cancelAll();
+        break;
+      }
+    }
+  }
+  
+  private boolean handleResponse(ShardResponse srsp) {
+    NamedList<Object> response = srsp.getSolrResponse().getResponse();
+    // TODO: why does this return null sometimes?
+    if (response == null) {
+      return false;
+    }
+    boolean success = (Boolean) response.get("sync");
+    
+    return success;
+  }
+
+  private void requestSync(String replica, String coreName) {
+    SyncShardRequest sreq = new SyncShardRequest();
+    sreq.coreName = coreName;
+    sreq.purpose = 1;
+    // TODO: this sucks
+    if (replica.startsWith("http://"))
+      replica = replica.substring(7);
+    sreq.shards = new String[]{replica};
+    sreq.actualShards = sreq.shards;
+    sreq.params = new ModifiableSolrParams();
+    sreq.params.set("qt","/get");
+    sreq.params.set("distrib",false);
+    sreq.params.set("getVersions",Integer.toString(100));
+    sreq.params.set("sync",replica);
+    
+    shardHandler.submit(sreq, replica, sreq.params);
   }
   
   public static ModifiableSolrParams params(String... params) {