Posted to commits@lucene.apache.org by si...@apache.org on 2012/07/20 03:01:52 UTC

svn commit: r1363608 [8/10] - in /lucene/dev/branches/LUCENE-2878: ./ dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/copyright/ dev-tools/idea/.idea/libraries/ dev-tools/idea/lucene/ dev-tools/maven/ dev-tools/maven/lucene/benchmark/ dev-tools/mave...

Modified: lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java Fri Jul 20 01:01:39 2012
@@ -280,7 +280,7 @@ public class QueryUtils {
               if (scorer == null) {
                 Weight w = s.createNormalizedWeight(q);
                 AtomicReaderContext context = readerContextArray.get(leafPtr);
-                scorer = w.scorer(context, true, false, context.reader().getLiveDocs());
+                scorer = w.scorer(context, true, false, false, false, false, context.reader().getLiveDocs());
               }
               
               int op = order[(opidx[0]++) % order.length];
@@ -327,7 +327,7 @@ public class QueryUtils {
               indexSearcher.setSimilarity(s.getSimilarity());
               Weight w = indexSearcher.createNormalizedWeight(q);
               AtomicReaderContext ctx = (AtomicReaderContext)indexSearcher.getTopReaderContext();
-              Scorer scorer = w.scorer(ctx, true, false, ctx.reader().getLiveDocs());
+              Scorer scorer = w.scorer(ctx, true, false, false, false, false, ctx.reader().getLiveDocs());
               if (scorer != null) {
                 boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
                 Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -354,7 +354,7 @@ public class QueryUtils {
           indexSearcher.setSimilarity(s.getSimilarity());
           Weight w = indexSearcher.createNormalizedWeight(q);
           AtomicReaderContext ctx = previousReader.getTopReaderContext();
-          Scorer scorer = w.scorer(ctx, true, false, ctx.reader().getLiveDocs());
+          Scorer scorer = w.scorer(ctx, true, false, false, false, false, ctx.reader().getLiveDocs());
           if (scorer != null) {
             boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
             Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -385,7 +385,7 @@ public class QueryUtils {
           long startMS = System.currentTimeMillis();
           for (int i=lastDoc[0]+1; i<=doc; i++) {
             Weight w = s.createNormalizedWeight(q);
-            Scorer scorer = w.scorer(context.get(leafPtr), true, false, liveDocs);
+            Scorer scorer = w.scorer(context.get(leafPtr), true, false, false, false, false, liveDocs);
             Assert.assertTrue("query collected "+doc+" but skipTo("+i+") says no more docs!",scorer.advance(i) != DocIdSetIterator.NO_MORE_DOCS);
             Assert.assertEquals("query collected "+doc+" but skipTo("+i+") got to "+scorer.docID(),doc,scorer.docID());
             float skipToScore = scorer.score();
@@ -413,7 +413,7 @@ public class QueryUtils {
           IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
           indexSearcher.setSimilarity(s.getSimilarity());
           Weight w = indexSearcher.createNormalizedWeight(q);
-          Scorer scorer = w.scorer((AtomicReaderContext)indexSearcher.getTopReaderContext(), true, false, previousReader.getLiveDocs());
+          Scorer scorer = w.scorer((AtomicReaderContext)indexSearcher.getTopReaderContext(), true, false, false, false, false, previousReader.getLiveDocs());
           if (scorer != null) {
             boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
             Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -438,7 +438,7 @@ public class QueryUtils {
       IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
       indexSearcher.setSimilarity(s.getSimilarity());
       Weight w = indexSearcher.createNormalizedWeight(q);
-      Scorer scorer = w.scorer((AtomicReaderContext)indexSearcher.getTopReaderContext(), true, false, previousReader.getLiveDocs());
+      Scorer scorer = w.scorer((AtomicReaderContext)indexSearcher.getTopReaderContext(), true, false, false, false, false, previousReader.getLiveDocs());
       if (scorer != null) {
         boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
         Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);

Modified: lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java Fri Jul 20 01:01:39 2012
@@ -1010,7 +1010,7 @@ public abstract class LuceneTestCase ext
       // TODO: remove this, and fix those tests to wrap before putting slow around:
       final boolean wasOriginallyAtomic = r instanceof AtomicReader;
       for (int i = 0, c = random.nextInt(6)+1; i < c; i++) {
-        switch(random.nextInt(4)) {
+        switch(random.nextInt(5)) {
           case 0:
             r = SlowCompositeReaderWrapper.wrap(r);
             break;
@@ -1041,6 +1041,16 @@ public abstract class LuceneTestCase ext
               new FieldFilterAtomicReader(ar, fields, true)
             );
             break;
+          case 4:
+            // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use
+            // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
+            // along our reader:
+            if (r instanceof AtomicReader) {
+              r = new FCInvisibleMultiReader(new AssertingAtomicReader((AtomicReader)r));
+            } else if (r instanceof DirectoryReader) {
+              r = new FCInvisibleMultiReader((DirectoryReader)r);
+            }
+            break;
           default:
             fail("should not get here");
         }

Modified: lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java Fri Jul 20 01:01:39 2012
@@ -30,7 +30,9 @@ import java.util.TimeZone;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.appending.AppendingCodec;
+import org.apache.lucene.codecs.asserting.AssertingCodec;
 import org.apache.lucene.codecs.lucene40.Lucene40Codec;
+import org.apache.lucene.codecs.mockrandom.MockRandomPostingsFormat;
 import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
 import org.apache.lucene.index.RandomCodec;
 import org.apache.lucene.search.RandomSimilarityProvider;
@@ -167,9 +169,13 @@ final class TestRuleSetupAndRestoreClass
       assert (codec instanceof PreFlexRWCodec) : "fix your classpath to have tests-framework.jar before lucene-core.jar";
       PREFLEX_IMPERSONATION_IS_ACTIVE = true;
     } else */ if (!"random".equals(TEST_POSTINGSFORMAT)) {
-      codec = new Lucene40Codec() {
-        private final PostingsFormat format = PostingsFormat.forName(TEST_POSTINGSFORMAT);
-        
+      final PostingsFormat format;
+      if ("MockRandom".equals(TEST_POSTINGSFORMAT)) {
+        format = new MockRandomPostingsFormat(random);
+      } else {
+        format = PostingsFormat.forName(TEST_POSTINGSFORMAT);
+      }
+      codec = new Lucene40Codec() {       
         @Override
         public PostingsFormat getPostingsFormatForField(String field) {
           return format;
@@ -184,6 +190,8 @@ final class TestRuleSetupAndRestoreClass
       codec = new SimpleTextCodec();
     } else if ("Appending".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 8 && !shouldAvoidCodec("Appending"))) {
       codec = new AppendingCodec();
+    } else if ("Asserting".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 7 && !shouldAvoidCodec("Asserting"))) {
+      codec = new AssertingCodec();
     } else if (!"random".equals(TEST_CODEC)) {
       codec = Codec.forName(TEST_CODEC);
     } else if ("random".equals(TEST_POSTINGSFORMAT)) {

Modified: lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java Fri Jul 20 01:01:39 2012
@@ -79,6 +79,7 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.IOContext;
 import org.junit.Assert;
 
+import com.carrotsearch.randomizedtesting.RandomizedContext;
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 
@@ -731,8 +732,12 @@ public class _TestUtil {
     }
     String newSuffix = suffix == null ? ".tmp" : suffix;
     File result;
+    // just pull one long always: we don't want to rely upon what may or may not
+    // already exist. otherwise tests might not reproduce, depending on when you last
+    // ran 'ant clean'
+    final Random random = new Random(RandomizedContext.current().getRandom().nextLong());
     do {
-      result = genTempFile(prefix, newSuffix, directory);
+      result = genTempFile(random, prefix, newSuffix, directory);
     } while (!result.createNewFile());
     return result;
   }
@@ -746,12 +751,12 @@ public class _TestUtil {
   private static class TempFileLocker {};
   private static TempFileLocker tempFileLocker = new TempFileLocker();
 
-  private static File genTempFile(String prefix, String suffix, File directory) {
+  private static File genTempFile(Random random, String prefix, String suffix, File directory) {
     int identify = 0;
 
     synchronized (tempFileLocker) {
       if (counter == 0) {
-        int newInt = new Random().nextInt();
+        int newInt = random.nextInt();
         counter = ((newInt / 65535) & 0xFFFF) + 0x2710;
         counterBase = counter;
       }
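
Not part of this patch: a minimal Java sketch of the seed-forking idea the comment above describes. Exactly one long is drawn from the test's master random and used to seed a private Random for temp-file names, so however many names are later generated, the master's sequence is untouched and the run stays reproducible. The class and seed below are illustrative only.

    import java.util.Random;

    public class SeedForkSketch {
      public static void main(String[] args) {
        Random master = new Random(42L);                  // stands in for RandomizedContext.current().getRandom()
        Random tempNames = new Random(master.nextLong()); // exactly one draw from the master
        for (int i = 0; i < 3; i++) {
          // consuming tempNames never advances the master's sequence
          System.out.println("temp-" + Integer.toHexString(tempNames.nextInt(0x10000)));
        }
        System.out.println(master.nextInt());             // unaffected by the loop above
      }
    }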

Modified: lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.Codec?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.Codec (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.Codec Fri Jul 20 01:01:39 2012
@@ -13,3 +13,4 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 
+org.apache.lucene.codecs.asserting.AssertingCodec

Modified: lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat Fri Jul 20 01:01:39 2012
@@ -20,3 +20,5 @@ org.apache.lucene.codecs.mocksep.MockSep
 org.apache.lucene.codecs.nestedpulsing.NestedPulsingPostingsFormat
 org.apache.lucene.codecs.ramonly.RAMOnlyPostingsFormat
 org.apache.lucene.codecs.lucene40ords.Lucene40WithOrds
+org.apache.lucene.codecs.asserting.AssertingPostingsFormat
+
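
Not part of this patch: a short sketch of why the two META-INF/services entries above are needed. Codec.forName and PostingsFormat.forName (used by the test rule earlier in this commit) resolve names through the JDK service loader, so the new Asserting implementations are only discoverable once listed in these files. This assumes their SPI names are "Asserting", matching the TEST_CODEC check above.

    import org.apache.lucene.codecs.Codec;
    import org.apache.lucene.codecs.PostingsFormat;

    public class AssertingSpiLookup {
      public static void main(String[] args) {
        // both lookups go through the service-loader registrations added above
        Codec codec = Codec.forName("Asserting");
        PostingsFormat postings = PostingsFormat.forName("Asserting");
        System.out.println(codec.getName() + " / " + postings.getName());
      }
    }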

Modified: lucene/dev/branches/LUCENE-2878/lucene/tools/src/java/org/apache/lucene/validation/ForbiddenApisCheckTask.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/lucene/tools/src/java/org/apache/lucene/validation/ForbiddenApisCheckTask.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/lucene/tools/src/java/org/apache/lucene/validation/ForbiddenApisCheckTask.java (original)
+++ lucene/dev/branches/LUCENE-2878/lucene/tools/src/java/org/apache/lucene/validation/ForbiddenApisCheckTask.java Fri Jul 20 01:01:39 2012
@@ -343,8 +343,11 @@ public class ForbiddenApisCheckTask exte
             throw new BuildException("Resource does not exist: " + r);
           }
           if (r instanceof StringResource) {
-            log("Reading inline API signatures...", Project.MSG_INFO);
-            parseApiFile(new StringReader(((StringResource) r).getValue()));
+            final String s = ((StringResource) r).getValue();
+            if (s != null && s.trim().length() > 0) {
+              log("Reading inline API signatures...", Project.MSG_INFO);
+              parseApiFile(new StringReader(s));
+            }
           } else {
             log("Reading API signatures: " + r, Project.MSG_INFO);
             parseApiFile(new InputStreamReader(r.getInputStream(), "UTF-8"));

Modified: lucene/dev/branches/LUCENE-2878/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/CHANGES.txt?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/CHANGES.txt (original)
+++ lucene/dev/branches/LUCENE-2878/solr/CHANGES.txt Fri Jul 20 01:01:39 2012
@@ -26,7 +26,19 @@ $Id$
 
 ==================  4.0.0-BETA ===================
 
+Versions of Major Components
+---------------------
+Apache Tika 1.1
+Carrot2 3.5.0
+Velocity 1.6.4 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.3.5
+
+Detailed Change List
+----------------------
+
 New Features
+----------------------
 
 * LUCENE-4201: Added JapaneseIterationMarkCharFilterFactory to normalize Japanese
   iteration marks. (Robert Muir, Christian Moen)
@@ -56,9 +68,22 @@ New Features
   
 * SOLR-2616: Include a sample java util logging configuration file. 
   (David Smiley, Mark Miller)
+ 
+* SOLR-3460: Add cloud-scripts directory and a zkcli.sh|bat tool for easy scripting
+  and interaction with ZooKeeper. (Mark Miller)
+
+* SOLR-1725: StatelessScriptUpdateProcessorFactory allows users to implement
+  the full ScriptUpdateProcessor API using any scripting language with a 
+  javax.script.ScriptEngineFactory
+  (Uri Boness, ehatcher, Simon Rosenthal, hossman)
+
+* SOLR-139: Change to updateable documents to create the document if it doesn't
+  already exist.  To assert that the document must exist, use the optimistic
+  concurrency feature by specifying a _version_ of 1.  (yonik)
 
 
 Bug Fixes
+----------------------
 
 * SOLR-3582: Our ZooKeeper watchers respond to session events as if they are change events, 
   creating undesirable side effects. (Trym R. Møller, Mark Miller)
@@ -73,14 +98,34 @@ Bug Fixes
 
 * SOLR-3610: After reloading a core, indexing would fail on any newly added fields to the schema. (Brent Mills, rmuir)
 
+* SOLR-3377: edismax fails to correctly parse a fielded query wrapped by parens.
+  This regression was introduced in 3.6.  (Bernd Fehling, Jan Høydahl, yonik)
+  
+* SOLR-3621: Fix rare concurrency issue when opening a new IndexWriter for replication or rollback.
+  (Mark Miller)
+
+* SOLR-1781: Replication index directories not always cleaned up. (Terje Sten Bjerkseth, Mark Miller)
+
+* SOLR-3639: Update ZooKeeper to 3.3.5 for a variety of bug fixes. (Mark Miller)
+
+* SOLR-3629: Typo in solr.xml persistence when overriding the solrconfig.xml 
+  file name using the "config" attribute prevented the override file from being
+  used. (Ryan Zezeski, hossman)
+
+
 Other Changes
+----------------------
+
+* SOLR-3524: Make discarding punctuation configurable in JapaneseTokenizerFactory.
+  The default is to discard punctuation, but this is overridable as an expert option.
+  (Kazuaki Hiraga, Jun Ohtani via Christian Moen)
 
 * SOLR-1770: Move the default core instance directory into a collection1 folder.
   (Mark Miller)
   
 * SOLR-3355: Add shard and collection to SolrCore statistics. (Michael Garski, Mark Miller)
 
-* SOLR-3475: solr.xml should default to persist=true (Mark Miller)
+* SOLR-3575: solr.xml should default to persist=true (Mark Miller)
 
 * SOLR-3563: Unloading all cores in a SolrCloud collection will now cause the removal of
   that collection's meta data from ZooKeeper. (Mark Miller, Per Steffensen)
@@ -88,6 +133,14 @@ Other Changes
 * SOLR-3599: Add zkClientTimeout to solr.xml so that it's obvious how to change it and so 
   that you can change it with a system property. (Mark Miller)
 
+* SOLR-3609: Change Solr's expanded webapp directory to be at a consistent path called
+  solr-webapp rather than a temporary directory. (Mark Miller)
+  
+* SOLR-3600: Raise the default zkClientTimeout from 10 seconds to 15 seconds. (Mark Miller)
+
+* SOLR-3215: Clone SolrInputDocument when distrib indexing so that update processors after
+  the distrib update process do not process the document twice. (Mark Miller)
+  
 
 ==================  4.0.0-ALPHA ==================
 More information about this release, including any errata related to the 
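
Not part of this patch: a hedged SolrJ sketch of the SOLR-139 behaviour noted in the New Features list above. An update now creates the document when it is missing, while sending _version_ = 1 asserts that it must already exist (optimistic concurrency). The core URL, field names, and the atomic "set" map are illustrative assumptions, not taken from this commit.

    import java.util.Collections;
    import org.apache.solr.client.solrj.impl.HttpSolrServer;
    import org.apache.solr.common.SolrInputDocument;

    public class UpdateMustExistSketch {
      public static void main(String[] args) throws Exception {
        HttpSolrServer server = new HttpSolrServer("http://localhost:8983/solr/collection1");
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "doc1");
        doc.addField("price", Collections.singletonMap("set", 9.99)); // atomic update of one field
        doc.addField("_version_", 1L);  // optimistic concurrency: fail if doc1 does not exist yet
        server.add(doc);                // without _version_, this would create doc1 if missing
        server.commit();
        server.shutdown();
      }
    }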

Modified: lucene/dev/branches/LUCENE-2878/solr/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/build.xml?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/build.xml (original)
+++ lucene/dev/branches/LUCENE-2878/solr/build.xml Fri Jul 20 01:01:39 2012
@@ -15,7 +15,7 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 -->
-<project name="solr" default="usage">
+<project name="solr" default="usage" xmlns:ivy="antlib:org.apache.ivy.ant">
   <description>Solr</description>
   
   <target name="usage" description="Prints out instructions">
@@ -51,7 +51,7 @@
       </manifest>
     </jar>
     <delete includeemptydirs="true">
-      <fileset dir="${example}/work" includes="**/*"/>
+      <fileset dir="${example}/solr-webapp" includes="**/*"/>
     </delete>
     <echo>See ${example}/README.txt for how to run the Solr example configuration.</echo>
   </target>
@@ -171,7 +171,7 @@
   </target>
     
   <!-- Validation (license/notice/api checks). -->
-  <target name="validate" depends="check-licenses,check-forbidden-apis" description="Validate stuff." />
+  <target name="validate" depends="check-licenses,rat-sources,check-forbidden-apis" description="Validate stuff." />
 
   <target name="check-licenses" depends="compile-tools,resolve,load-custom-tasks" description="Validate license stuff.">
     <license-check-macro dir="${basedir}">
@@ -190,7 +190,10 @@
     </license-check-macro>
   </target>
   
-  <target name="check-forbidden-apis" depends="compile-tools,compile-test,load-custom-tasks" description="Check forbidden API calls in compiled class files.">
+  <target name="check-forbidden-apis" depends="compile-tools,compile-test,load-custom-tasks,-check-forbidden-java-apis,-check-forbidden-test-apis" 
+          description="Check forbidden API calls in compiled class files."/>
+
+  <target name="-check-forbidden-java-apis">
     <forbidden-apis>
       <classpath refid="additional.dependencies"/>
       <apiFileSet dir="${custom-tasks.dir}/forbiddenApis">
@@ -206,6 +209,20 @@
     </forbidden-apis>
   </target>
 
+  <target name="-check-forbidden-test-apis">
+    <forbidden-apis apiFile="${custom-tasks.dir}/forbiddenApis/tests.txt"> 
+      <classpath refid="junit-path"/>
+      <fileset dir="${basedir}/build">
+        <include name="**/classes/test/**/*.class"/>
+        <include name="solr-test-framework/**/*.class"/>
+        <!-- not actually a test -->
+        <exclude name="solr-core/classes/test/org/apache/solr/search/DocSetPerf.class"/>
+        <!-- imported code -->
+        <exclude name="solr-core/classes/test/org/apache/solr/internal/**/*.class"/>
+      </fileset>
+    </forbidden-apis>
+  </target>
+
   <!-- rat sources -->
   <target name="rat-sources">
     <sequential>
@@ -241,64 +258,26 @@
     </delete>
   </target>
 
-  <!-- Clover targets -->
-  <target name="clover" depends="clover.setup, clover.info"
-          description="Instrument the Unit tests using Clover.  Requires a Clover license and clover.jar in the ANT classpath.  To use, specify -Drun.clover=true on the command line."/>
-
-  <target name="clover.setup" if="clover.enabled">
-    <taskdef resource="cloverlib.xml"/>
-    <mkdir dir="${clover.db.dir}"/>
-    <clover-setup initString="${clover.db.dir}/lucene_coverage.db">
-      <fileset dir="core/src/java"/>
-      <fileset dir="solrj/src/java"/>
-      <fileset dir="contrib/analysis-extras/src/java"/>
-      <fileset dir="contrib/clustering/src/java"/>
-      <fileset dir="contrib/dataimporthandler/src/java"/>
-      <fileset dir="contrib/dataimporthandler-extras/src/java"/>
-      <fileset dir="contrib/extraction/src/java"/>
-      <fileset dir="contrib/uima/src/java"/>
-      <testsources dir="test-framework/src/java"/>
-      <testsources dir="core/src/test"/>
-      <testsources dir="solrj/src/test"/>
-      <testsources dir="contrib/analysis-extras/src/test" />
-      <testsources dir="contrib/clustering/src/test" />
-      <testsources dir="contrib/dataimporthandler/src/test" />
-      <testsources dir="contrib/dataimporthandler-extras/src/test" />
-      <testsources dir="contrib/extraction/src/test" />
-      <testsources dir="contrib/uima/src/test" />
-    </clover-setup>
-  </target>
-
-  <target name="clover.info" unless="clover.present">
-  	<echo>
-      Clover not found. Code coverage reports disabled.
-  	</echo>
-  </target>
-
-  <target name="clover.check">
-	<fail unless="clover.present">
-	  ##################################################################
-      Clover not found.
-      Please make sure clover.jar is in ANT_HOME/lib, or made available
-      to Ant using other mechanisms like -lib or CLASSPATH.
-      ##################################################################
-  	</fail>
-  </target>
-
-  <!-- Run after Junit tests. -->
-  <target name="generate-clover-reports" depends="clover.check, clover">
+  <!--
+   Run after Junit tests.
+   -->
+  <target name="generate-clover-reports" depends="clover">
+    <fail unless="run.clover">Clover not enabled!</fail>
     <mkdir dir="${clover.report.dir}"/>
+    <fileset dir="build" id="clover.test.result.files">
+      <include name="**/test/TEST-*.xml"/>
+    </fileset>
     <clover-report>
-       <current outfile="${clover.report.dir}/clover.xml"
-                title="${fullnamever}">
-          <format type="xml"/>
-       </current>
-       <current outfile="${clover.report.dir}" title="${fullnamever}">
-          <format type="html"/>
-       </current>
+      <current outfile="${clover.report.dir}" title="${final.name}" numThreads="0">
+        <format type="html" filter="assert"/>
+        <testresults refid="clover.test.result.files"/>
+      </current>
+      <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
+        <format type="xml" filter="assert"/>
+        <testresults refid="clover.test.result.files"/>
+      </current>
     </clover-report>
   </target>
-
   
   <!-- ========================================================================= -->
   <!-- ===================== DISTRIBUTION-RELATED TASKS ======================== -->
@@ -358,15 +337,17 @@
 
     <!-- Exclude javadoc package-list files under licenses incompatible with the ASL -->
     <delete dir="${svn.export.dir}/lucene/tools/javadoc/java6"/>
+    <!-- Exclude clover license files incompatible with the ASL -->
+    <delete dir="${svn.export.dir}/lucene/tools/clover"/>
 
     <tar destfile="${source.package.file}" compression="gzip" longfile="gnu">
       <tarfileset dir="${svn.export.dir}"
                   prefix="${fullnamever}"
-                  excludes="example/**/*.sh example/**/bin/"/>
+                  excludes="solr/example/**/*.sh solr/example/**/bin/ solr/scripts/**"/>
       <tarfileset dir="${svn.export.dir}"
                   prefix="${fullnamever}"
                   filemode="755"
-                  includes="example/**/*.sh example/**/bin/"/>
+                  includes="solr/example/**/*.sh solr/example/**/bin/ solr/scripts/**"/>
       <tarfileset dir="${svn.export.dir}/solr" prefix="${fullnamever}"
                   includes="NOTICE.txt,LICENSE.txt"/>
     </tar>
@@ -405,7 +386,7 @@
           depends="init-dist, dist, example, javadocs">
     <mkdir dir="${dest}/${fullnamever}"/>
     <delete includeemptydirs="true">
-      <fileset dir="${example}/work" includes="**/*"/>
+      <fileset dir="${example}/solr-webapp" includes="**/*"/>
       <fileset dir="${dest}/${fullnamever}" includes="**/*"/>
     </delete>
  

Modified: lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-multi-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-multi-start.sh?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-multi-start.sh (original)
+++ lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-multi-start.sh Fri Jul 20 01:01:39 2012
@@ -24,7 +24,7 @@ cp -r -f example example4
 cp -r -f example example5
 cp -r -f example example6
 
-java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkController 127.0.0.1:9983 example/multicore 8983
+java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd upconf -zkhost 127.0.0.1:9983 -solrhome example/multicore -runzk 8983
 
 cd example
 java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dsolr.solr.home=multicore -jar start.jar 1>example.log 2>&1 &

Modified: lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-start.sh?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-start.sh (original)
+++ lucene/dev/branches/LUCENE-2878/solr/cloud-dev/solrcloud-start.sh Fri Jul 20 01:01:39 2012
@@ -22,7 +22,7 @@ cp -r -f example example4
 cp -r -f example example5
 cp -r -f example example6
 
-java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkController 127.0.0.1:9983 example/solr 8983
+java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd bootstrap -zkhost 127.0.0.1:9983 -solrhome example/solr -runzk 8983
 
 cd example
 java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -jar start.jar 1>example.log 2>&1 &

Modified: lucene/dev/branches/LUCENE-2878/solr/common-build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/common-build.xml?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/common-build.xml (original)
+++ lucene/dev/branches/LUCENE-2878/solr/common-build.xml Fri Jul 20 01:01:39 2012
@@ -49,15 +49,7 @@
   <property name="tests.loggingfile" value="${common-solr.dir}/testlogging.properties"/>
   <property name="tests.cleanthreads.sysprop" value="perClass"/>
 
-  <property name="clover.db.dir" location="${dest}/test/clover/db"/>
-  <property name="clover.report.dir" location="${dest}/test/clover/reports"/>
-  <available property="clover.present" classname="com.cenqua.clover.tasks.CloverReportTask"/>
-  <condition property="clover.enabled">
-    <and>
-      <isset property="run.clover"/>
-      <isset property="clover.present"/>
-    </and>
-  </condition>
+  <property name="clover.report.dir" location="${dest}/clover/reports"/>
 
   <import file="${common-solr.dir}/../lucene/module-build.xml"/>
 

Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2TokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2TokenizerFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2TokenizerFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2TokenizerFactory.java Fri Jul 20 01:01:39 2012
@@ -143,7 +143,7 @@ public class LuceneCarrot2TokenizerFacto
 
       public void reset(Reader input) {
         try {
-          sentenceTokenizer.reset(input);
+          sentenceTokenizer.setReader(input);
           wordTokenFilter = (TokenStream) tokenFilterClass.getConstructor(
               TokenStream.class).newInstance(sentenceTokenizer);
           term = wordTokenFilter.addAttribute(CharTermAttribute.class);

Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java Fri Jul 20 01:01:39 2012
@@ -17,7 +17,6 @@
 package org.apache.solr.handler.dataimport;
 
 import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
-import org.apache.lucene.analysis.CharReader;
 
 import java.io.IOException;
 import java.io.StringReader;
@@ -73,7 +72,7 @@ public class HTMLStripTransformer extend
     StringBuilder out = new StringBuilder();
     StringReader strReader = new StringReader(value);
     try {
-      HTMLStripCharFilter html = new HTMLStripCharFilter(CharReader.get(strReader.markSupported() ? strReader : new BufferedReader(strReader)));
+      HTMLStripCharFilter html = new HTMLStripCharFilter(strReader.markSupported() ? strReader : new BufferedReader(strReader));
       char[] cbuf = new char[1024 * 10];
       while (true) {
         int count = html.read(cbuf);

Modified: lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml Fri Jul 20 01:01:39 2012
@@ -22,6 +22,7 @@
     <dependencies>
       <dependency org="commons-codec" name="commons-codec" rev="1.6" transitive="false"/>
       <dependency org="commons-fileupload" name="commons-fileupload" rev="1.2.1" transitive="false"/>
+      <dependency org="commons-cli" name="commons-cli" rev="1.2" transitive="false"/>
       <dependency org="org.apache.httpcomponents" name="httpcore" rev="4.1.4" transitive="false"/>
       <dependency org="org.apache.httpcomponents" name="httpclient" rev="4.1.3" transitive="false"/>
       <dependency org="org.apache.httpcomponents" name="httpmime" rev="4.1.3" transitive="false"/>

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HTMLStripCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HTMLStripCharFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HTMLStripCharFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HTMLStripCharFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -18,10 +18,10 @@ package org.apache.solr.analysis;
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.CharStream;
 import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
 import org.apache.lucene.analysis.util.CharFilterFactory;
 
+import java.io.Reader;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
@@ -44,7 +44,7 @@ import java.util.regex.Pattern;
   Set<String> escapedTags = null;
   Pattern TAG_NAME_PATTERN = Pattern.compile("[^\\s,]+");
 
-  public HTMLStripCharFilter create(CharStream input) {
+  public HTMLStripCharFilter create(Reader input) {
     HTMLStripCharFilter charFilter;
     if (null == escapedTags) {
       charFilter = new HTMLStripCharFilter(input);

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HunspellStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HunspellStemFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HunspellStemFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/HunspellStemFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -28,6 +28,7 @@ import org.apache.lucene.analysis.util.I
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * TokenFilterFactory that creates instances of {@link org.apache.lucene.analysis.hunspell.HunspellStemFilter}.
@@ -76,7 +77,6 @@ public class HunspellStemFilterFactory e
       else throw new InitializationException("Unknown value for " + PARAM_IGNORE_CASE + ": " + pic + ". Must be true or false");
     }
 
-
     String strictAffixParsingParam = args.get(PARAM_STRICT_AFFIX_PARSING);
     boolean strictAffixParsing = true;
     if(strictAffixParsingParam != null) {
@@ -85,14 +85,22 @@ public class HunspellStemFilterFactory e
       else throw new InitializationException("Unknown value for " + PARAM_STRICT_AFFIX_PARSING + ": " + strictAffixParsingParam + ". Must be true or false");
     }
 
+    InputStream affix = null;
+    List<InputStream> dictionaries = new ArrayList<InputStream>();
+
     try {
-      List<InputStream> dictionaries = new ArrayList<InputStream>();
+      dictionaries = new ArrayList<InputStream>();
       for (String file : dictionaryFiles) {
         dictionaries.add(loader.openResource(file));
       }
-      this.dictionary = new HunspellDictionary(loader.openResource(affixFile), dictionaries, luceneMatchVersion, ignoreCase, strictAffixParsing);
+      affix = loader.openResource(affixFile);
+
+      this.dictionary = new HunspellDictionary(affix, dictionaries, luceneMatchVersion, ignoreCase, strictAffixParsing);
     } catch (Exception e) {
       throw new InitializationException("Unable to load hunspell data! [dictionary=" + args.get("dictionary") + ",affix=" + affixFile + "]", e);
+    } finally {
+      IOUtils.closeWhileHandlingException(affix);
+      IOUtils.closeWhileHandlingException(dictionaries);
     }
   }
 

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseIterationMarkCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseIterationMarkCharFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseIterationMarkCharFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseIterationMarkCharFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -17,12 +17,13 @@ package org.apache.solr.analysis;
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.CharStream;
+import org.apache.lucene.analysis.CharFilter;
 import org.apache.lucene.analysis.ja.JapaneseIterationMarkCharFilter;
 import org.apache.lucene.analysis.util.AbstractAnalysisFactory;
 import org.apache.lucene.analysis.util.CharFilterFactory;
 import org.apache.lucene.analysis.util.MultiTermAwareComponent;
 
+import java.io.Reader;
 import java.util.Map;
 
 /**
@@ -46,7 +47,7 @@ public class JapaneseIterationMarkCharFi
   private boolean normalizeKana = true;
 
   @Override
-  public CharStream create(CharStream input) {
+  public CharFilter create(Reader input) {
     return new JapaneseIterationMarkCharFilter(input, normalizeKanji, normalizeKana);
   }
 

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseTokenizerFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseTokenizerFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/JapaneseTokenizerFactory.java Fri Jul 20 01:01:39 2012
@@ -42,9 +42,10 @@ import org.apache.lucene.analysis.util.R
  * &lt;fieldType name="text_ja" class="solr.TextField"&gt;
  *   &lt;analyzer&gt;
  *     &lt;tokenizer class="solr.JapaneseTokenizerFactory"
- *       mode=NORMAL
- *       userDictionary=user.txt
- *       userDictionaryEncoding=UTF-8
+ *       mode="NORMAL"
+ *       userDictionary="user.txt"
+ *       userDictionaryEncoding="UTF-8"
+ *       discardPunctuation="true"
  *     /&gt;
  *     &lt;filter class="solr.JapaneseBaseFormFilterFactory"/&gt;
  *   &lt;/analyzer&gt;
@@ -58,9 +59,14 @@ public class JapaneseTokenizerFactory ex
   
   private static final String USER_DICT_ENCODING = "userDictionaryEncoding";
 
+  private static final String DISCARD_PUNCTUATION = "discardPunctuation"; // Expert option
+
   private UserDictionary userDictionary;
+
   private Mode mode;
-  
+
+  private boolean discardPunctuation;
+
   @Override
   public void inform(ResourceLoader loader) {
     mode = getMode(args);
@@ -83,11 +89,12 @@ public class JapaneseTokenizerFactory ex
     } catch (Exception e) {
       throw new InitializationException("Exception thrown while loading dictionary", e);
     }
+    discardPunctuation = getBoolean(DISCARD_PUNCTUATION, true);
   }
   
   @Override
   public Tokenizer create(Reader input) {
-    return new JapaneseTokenizer(input, userDictionary, true, mode);
+    return new JapaneseTokenizer(input, userDictionary, discardPunctuation, mode);
   }
   
   private Mode getMode(Map<String, String> args) {

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java Fri Jul 20 01:01:39 2012
@@ -26,8 +26,6 @@ import java.util.HashMap;
 import java.util.Set;
 
 import org.apache.lucene.analysis.charfilter.BaseCharFilter;
-import org.apache.lucene.analysis.CharReader;
-import org.apache.lucene.analysis.CharStream;
 
 /**
  * <p>
@@ -72,21 +70,21 @@ public class LegacyHTMLStripCharFilter e
 
   public static void main(String[] args) throws IOException {
     Reader in = new LegacyHTMLStripCharFilter(
-            CharReader.get(new InputStreamReader(System.in, Charset.defaultCharset())));
+            new InputStreamReader(System.in, Charset.defaultCharset()));
     int ch;
     while ( (ch=in.read()) != -1 ) System.out.print((char)ch);
   }
 
-  public LegacyHTMLStripCharFilter(CharStream source) {
-    super(source.markSupported() ? source : CharReader.get(new BufferedReader(source)));
+  public LegacyHTMLStripCharFilter(Reader source) {
+    super(source.markSupported() ? source : new BufferedReader(source));
   }
 
-  public LegacyHTMLStripCharFilter(CharStream source, Set<String> escapedTags){
+  public LegacyHTMLStripCharFilter(Reader source, Set<String> escapedTags){
     this(source);
     this.escapedTags = escapedTags;
   }
 
-  public LegacyHTMLStripCharFilter(CharStream source, Set<String> escapedTags, int readAheadLimit){
+  public LegacyHTMLStripCharFilter(Reader source, Set<String> escapedTags, int readAheadLimit){
     this(source);
     this.escapedTags = escapedTags;
     this.readAheadLimit = readAheadLimit;
@@ -105,7 +103,7 @@ public class LegacyHTMLStripCharFilter e
       return ch;
     }
     numRead++;
-    return input.read();
+    return in.read();
   }
 
   private int nextSkipWS() throws IOException {
@@ -120,7 +118,7 @@ public class LegacyHTMLStripCharFilter e
       return pushed.charAt(len-1);
     }
     numRead++;
-    int ch = input.read();
+    int ch = in.read();
     push(ch);
     return ch;
   }
@@ -182,11 +180,11 @@ public class LegacyHTMLStripCharFilter e
 
   private void saveState() throws IOException {
     lastMark = numRead;
-    input.mark(readAheadLimit);
+    in.mark(readAheadLimit);
   }
 
   private void restoreState() throws IOException {
-    input.reset();
+    in.reset();
     pushed.setLength(0);
   }
 
@@ -775,12 +773,6 @@ public class LegacyHTMLStripCharFilter e
     return i;
   }
 
-  @Override
-  public void close() throws IOException {
-    input.close();
-  }
-
-
   private static final HashMap<String,Character> entityTable;
   static {
     entityTable = new HashMap<String,Character>();

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -18,7 +18,8 @@ package org.apache.solr.analysis;
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.CharStream;
+import java.io.Reader;
+
 import org.apache.lucene.analysis.util.CharFilterFactory;
 
 /**
@@ -52,7 +53,7 @@ import org.apache.lucene.analysis.util.C
 @Deprecated
 public class LegacyHTMLStripCharFilterFactory extends CharFilterFactory {
 
-  public LegacyHTMLStripCharFilter create(CharStream input) {
+  public LegacyHTMLStripCharFilter create(Reader input) {
     return new LegacyHTMLStripCharFilter(input);
   }
 

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/MappingCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/MappingCharFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/MappingCharFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/MappingCharFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -19,12 +19,13 @@ package org.apache.solr.analysis;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.Reader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.lucene.analysis.CharStream;
+import org.apache.lucene.analysis.CharFilter;
 import org.apache.lucene.analysis.charfilter.MappingCharFilter;
 import org.apache.lucene.analysis.charfilter.NormalizeCharMap;
 import org.apache.lucene.analysis.util.*;
@@ -78,7 +79,7 @@ public class MappingCharFilterFactory ex
     }
   }
 
-  public CharStream create(CharStream input) {
+  public CharFilter create(Reader input) {
     return new MappingCharFilter(normMap,input);
   }
 

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PatternReplaceCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PatternReplaceCharFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PatternReplaceCharFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PatternReplaceCharFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -17,10 +17,11 @@
 
 package org.apache.solr.analysis;
 
+import java.io.Reader;
 import java.util.Map;
 import java.util.regex.Pattern;
 
-import org.apache.lucene.analysis.CharStream;
+import org.apache.lucene.analysis.CharFilter;
 import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter;
 import org.apache.lucene.analysis.util.CharFilterFactory;
 
@@ -53,7 +54,7 @@ public class PatternReplaceCharFilterFac
     // TODO: throw exception if you set maxBlockChars or blockDelimiters ?
   }
 
-  public CharStream create(CharStream input) {
+  public CharFilter create(Reader input) {
     return new PatternReplaceCharFilter( p, replacement, input );
   }
 }

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PersianCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PersianCharFilterFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PersianCharFilterFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/PersianCharFilterFactory.java Fri Jul 20 01:01:39 2012
@@ -17,7 +17,9 @@ package org.apache.solr.analysis;
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.CharStream;
+import java.io.Reader;
+
+import org.apache.lucene.analysis.CharFilter;
 import org.apache.lucene.analysis.fa.PersianCharFilter;
 import org.apache.lucene.analysis.util.AbstractAnalysisFactory;
 import org.apache.lucene.analysis.util.CharFilterFactory;
@@ -37,7 +39,7 @@ import org.apache.lucene.analysis.util.M
 public class PersianCharFilterFactory extends CharFilterFactory implements MultiTermAwareComponent {
 
   @Override
-  public CharStream create(CharStream input) {
+  public CharFilter create(Reader input) {
     return new PersianCharFilter(input);
   }
 

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java Fri Jul 20 01:01:39 2012
@@ -50,7 +50,7 @@ public final class TokenizerChain extend
   @Override
   public Reader initReader(String fieldName, Reader reader) {
     if (charFilters != null && charFilters.length > 0) {
-      CharStream cs = CharReader.get( reader );
+      Reader cs = reader;
       for (CharFilterFactory charFilter : charFilters) {
         cs = charFilter.create(cs);
       }
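
Not part of this patch: a minimal sketch of the Reader-based char-filter API the chain above now relies on. With CharStream and CharReader.get(...) removed throughout this commit, a char filter such as HTMLStripCharFilter is constructed directly over any java.io.Reader and read like one; the input text below is illustrative only.

    import java.io.Reader;
    import java.io.StringReader;
    import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;

    public class CharFilterChainSketch {
      public static void main(String[] args) throws Exception {
        Reader raw = new StringReader("<b>hello</b> world");
        Reader stripped = new HTMLStripCharFilter(raw);  // a CharFilter is itself a Reader
        StringBuilder out = new StringBuilder();
        for (int ch = stripped.read(); ch != -1; ch = stripped.read()) {
          out.append((char) ch);
        }
        stripped.close();
        System.out.println(out);  // markup removed: "hello world"
      }
    }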

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java Fri Jul 20 01:01:39 2012
@@ -73,13 +73,13 @@ final class TrieTokenizer extends Tokeni
     this.precisionStep = precisionStep;
     this.ts = ts;
 
-    reset(input);
+    setReader(input);
   }
 
   @Override
-  public void reset(Reader input) {
+  public void setReader(Reader input) {
    try {
-      super.reset(input);
+      super.setReader(input);
       input = super.input;
       char[] buf = new char[32];
       int len = input.read(buf);

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java Fri Jul 20 01:01:39 2012
@@ -45,8 +45,12 @@ public class OverseerCollectionProcessor
 
   public static final String CREATECOLLECTION = "createcollection";
 
+  public static final String RELOADCOLLECTION = "reloadcollection";
+  
   // TODO: use from Overseer?
   private static final String QUEUE_OPERATION = "operation";
+
+
   
   private static Logger log = LoggerFactory
       .getLogger(OverseerCollectionProcessor.class);
@@ -124,77 +128,19 @@ public class OverseerCollectionProcessor
     if (CREATECOLLECTION.equals(operation)) {
       return createCollection(zkStateReader.getCloudState(), message);
     } else if (DELETECOLLECTION.equals(operation)) {
-      return deleteCollection(zkStateReader.getCloudState(), message);
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
+      params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
+      return collectionCmd(zkStateReader.getCloudState(), message, params);
+    } else if (RELOADCOLLECTION.equals(operation)) {
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
+      return collectionCmd(zkStateReader.getCloudState(), message, params);
     }
     // unknown command, toss it from our queue
     return true;
   }
-  
-  private boolean deleteCollection(CloudState cloudState, ZkNodeProps message) {
-    
-    String name = message.get("name");
-    
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
-    
-    Map<String,Slice> slices = cloudState.getCollectionStates().get(name);
-    
-    if (slices == null) {
-      throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection:" + name);
-    }
-    
-    for (Map.Entry<String,Slice> entry : slices.entrySet()) {
-      Slice slice = entry.getValue();
-      Map<String,ZkNodeProps> shards = slice.getShards();
-      Set<Map.Entry<String,ZkNodeProps>> shardEntries = shards.entrySet();
-      for (Map.Entry<String,ZkNodeProps> shardEntry : shardEntries) {
-        final ZkNodeProps node = shardEntry.getValue();
-        if (cloudState.liveNodesContain(node.get(ZkStateReader.NODE_NAME_PROP))) {
-          params.set(CoreAdminParams.CORE, name);
-          params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
 
-          String replica = node.get(ZkStateReader.BASE_URL_PROP);
-          ShardRequest sreq = new ShardRequest();
-          // yes, they must use same admin handler path everywhere...
-          params.set("qt", adminPath);
-
-          sreq.purpose = 1;
-          // TODO: this sucks
-          if (replica.startsWith("http://")) replica = replica.substring(7);
-          sreq.shards = new String[] {replica};
-          sreq.actualShards = sreq.shards;
-          sreq.params = params;
-          
-          shardHandler.submit(sreq, replica, sreq.params);
-        }
-      }
-    }
-    
-    int failed = 0;
-    ShardResponse srsp;
-    do {
-      srsp = shardHandler.takeCompletedOrError();
-      if (srsp != null) {
-        Throwable e = srsp.getException();
-        if (e != null) {
-          // should we retry?
-          // TODO: we should return errors to the client
-          // TODO: what if one fails and others succeed?
-          failed++;
-          log.error("Error talking to shard: " + srsp.getShard(), e);
-        }
-      }
-    } while (srsp != null);
-
-    
-    // if all calls succeeded, return true
-    if (failed > 0) {
-      return false;
-    }
-    return true;
-  }
-
-  // TODO: bad name conflict with another method
   private boolean createCollection(CloudState cloudState, ZkNodeProps message) {
     
     // look at the replication factor and see if it matches reality
@@ -236,10 +182,13 @@ public class OverseerCollectionProcessor
     Collections.shuffle(nodeList);
     
     int numNodes = numShards * (numReplicas + 1);
-    List<String> createOnNodes = nodeList.subList(0, Math.min(nodeList.size() -1, numNodes - 1));
+    List<String> createOnNodes = nodeList.subList(0, Math.min(nodeList.size(), numNodes));
+    
+    log.info("Create collection " + name + " on " + createOnNodes);
     
     for (String replica : createOnNodes) {
       // TODO: this does not work if original url had _ in it
+      // We should have a master list
       replica = replica.replaceAll("_", "/");
       params.set(CoreAdminParams.NAME, name);
       params.set("collection.configName", configName);
@@ -279,4 +228,64 @@ public class OverseerCollectionProcessor
     }
     return true;
   }
+  
+  private boolean collectionCmd(CloudState cloudState, ZkNodeProps message, ModifiableSolrParams params) {
+    log.info("Executing Collection Cmd : " + params);
+    String name = message.get("name");
+    
+    Map<String,Slice> slices = cloudState.getCollectionStates().get(name);
+    
+    if (slices == null) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection:" + name);
+    }
+    
+    for (Map.Entry<String,Slice> entry : slices.entrySet()) {
+      Slice slice = entry.getValue();
+      Map<String,ZkNodeProps> shards = slice.getShards();
+      Set<Map.Entry<String,ZkNodeProps>> shardEntries = shards.entrySet();
+      for (Map.Entry<String,ZkNodeProps> shardEntry : shardEntries) {
+        final ZkNodeProps node = shardEntry.getValue();
+        if (cloudState.liveNodesContain(node.get(ZkStateReader.NODE_NAME_PROP))) {
+          params.set(CoreAdminParams.CORE, node.get(ZkStateReader.CORE_NAME_PROP));
+
+          String replica = node.get(ZkStateReader.BASE_URL_PROP);
+          ShardRequest sreq = new ShardRequest();
+          // yes, they must use the same admin handler path everywhere...
+          params.set("qt", adminPath);
+
+          sreq.purpose = 1;
+          // TODO: this sucks
+          if (replica.startsWith("http://")) replica = replica.substring(7);
+          sreq.shards = new String[] {replica};
+          sreq.actualShards = sreq.shards;
+          sreq.params = params;
+          
+          shardHandler.submit(sreq, replica, sreq.params);
+        }
+      }
+    }
+    
+    int failed = 0;
+    ShardResponse srsp;
+    do {
+      srsp = shardHandler.takeCompletedOrError();
+      if (srsp != null) {
+        Throwable e = srsp.getException();
+        if (e != null) {
+          // should we retry?
+          // TODO: we should return errors to the client
+          // TODO: what if one fails and others succeed?
+          failed++;
+          log.error("Error talking to shard: " + srsp.getShard(), e);
+        }
+      }
+    } while (srsp != null);
+
+    
+    // if all calls succeeded, return true
+    if (failed > 0) {
+      return false;
+    }
+    return true;
+  }
 }
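
The refactor above replaces the collection-specific deleteCollection() with a generic collectionCmd() that fans a single CoreAdmin request out to every live replica of the named collection and then counts failed ShardResponses. For reference, a minimal sketch (not part of the commit) of the per-replica request collectionCmd() builds for RELOADCOLLECTION; the core name, base URL and admin handler path below are illustrative values that would normally come from the cluster state and solr.xml:

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
    params.set(CoreAdminParams.CORE, "collection1_shard1_replica1"); // ZkStateReader.CORE_NAME_PROP
    params.set("qt", "/admin/cores");                                // adminPath
    ShardRequest sreq = new ShardRequest();
    sreq.purpose = 1;
    sreq.shards = new String[] {"127.0.0.1:8983/solr"};              // BASE_URL_PROP with "http://" stripped
    sreq.actualShards = sreq.shards;
    sreq.params = params;
    shardHandler.submit(sreq, sreq.shards[0], sreq.params);

Only the ACTION (and, for DELETECOLLECTION, DELETE_INSTANCE_DIR) differs between the operations that now share this code path.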

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java Fri Jul 20 01:01:39 2012
@@ -33,6 +33,7 @@ import java.util.regex.Pattern;
 
 import javax.xml.xpath.XPathConstants;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.WaitForState;
 import org.apache.solr.common.SolrException;
@@ -50,8 +51,8 @@ import org.apache.solr.core.Config;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
+
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.util.DOMUtil;
@@ -63,7 +64,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
-import org.xml.sax.InputSource;
 
 /**
  * Handle ZooKeeper interactions.
@@ -122,53 +122,6 @@ public final class ZkController {
   private CoreContainer cc;
 
   /**
-   * Bootstraps the current configs for all collections in solr.xml.
-   * Takes two params - the zkhost to connect to and the solrhome location
-   * to find solr.xml.
-   *
-   * If you also pass a solrPort, it will be used to start
-   * an embedded zk useful for single machine, multi node tests.
-   * 
-   * @param args
-   * @throws Exception
-   */
-  public static void main(String[] args) throws Exception {
-    // start up a tmp zk server first
-    String zkServerAddress = args[0];
-    
-    String solrHome = args[1];
-   
-    String solrPort = null;
-    if (args.length > 2) {
-      solrPort = args[2];
-    }
-    
-
-    SolrZkServer zkServer = null;
-    if (solrPort != null) {
-      zkServer = new SolrZkServer("true", null, solrHome + "/zoo_data", solrHome, solrPort);
-      zkServer.parseConfig();
-      zkServer.start();
-    }
-    
-    SolrZkClient zkClient = new SolrZkClient(zkServerAddress, 15000, 5000,
-        new OnReconnect() {
-          @Override
-          public void command() {
-          }});
-    
-    SolrResourceLoader loader = new SolrResourceLoader(solrHome);
-    solrHome = loader.getInstanceDir();
-    
-    InputSource cfgis = new InputSource(new File(solrHome, "solr.xml").toURI().toASCIIString());
-    Config cfg = new Config(loader, null, cfgis , null, false);
-    bootstrapConf(zkClient, cfg, solrHome);
-    if (solrPort != null) {
-      zkServer.stop();
-    }
-  }
-
-  /**
    * @param cc if null, recovery will not be enabled
    * @param zkServerAddress
    * @param zkClientTimeout
@@ -898,9 +851,7 @@ public final class ZkController {
           
           ZkNodeProps zkProps = new ZkNodeProps(collectionProps);
           zkClient.makePath(collectionPath, ZkStateReader.toJSON(zkProps), CreateMode.PERSISTENT, null, true);
-         
-          // ping that there is a new collection
-          zkClient.setData(ZkStateReader.COLLECTIONS_ZKNODE, (byte[])null, true);
+
         } catch (KeeperException e) {
           // its okay if the node already exists
           if (e.code() != KeeperException.Code.NODEEXISTS) {
@@ -1008,6 +959,24 @@ public final class ZkController {
     }
   }
   
+  public static void downloadFromZK(SolrZkClient zkClient, String zkPath,
+      File dir) throws IOException, KeeperException, InterruptedException {
+    List<String> files = zkClient.getChildren(zkPath, null, true);
+    
+    for (String file : files) {
+      List<String> children = zkClient.getChildren(zkPath + "/" + file, null, true);
+      if (children.size() == 0) {
+        byte[] data = zkClient.getData(zkPath + "/" + file, null, null, true);
+        dir.mkdirs(); 
+        log.info("Write file " + new File(dir, file));
+        FileUtils.writeStringToFile(new File(dir, file), new String(data, "UTF-8"), "UTF-8");
+      } else {
+        downloadFromZK(zkClient, zkPath + "/" + file, new File(dir, file));
+      }
+    }
+  }
+  
+  
   private String getCoreNodeName(CoreDescriptor descriptor){
     return getNodeName() + "_"
         + descriptor.getName();
@@ -1016,6 +985,10 @@ public final class ZkController {
   public static void uploadConfigDir(SolrZkClient zkClient, File dir, String configName) throws IOException, KeeperException, InterruptedException {
     uploadToZK(zkClient, dir, ZkController.CONFIGS_ZKNODE + "/" + configName);
   }
+  
+  public static void downloadConfigDir(SolrZkClient zkClient, String configName, File dir) throws IOException, KeeperException, InterruptedException {
+    downloadFromZK(zkClient, ZkController.CONFIGS_ZKNODE + "/" + configName, dir);
+  }
 
   public void preRegister(CoreDescriptor cd) throws KeeperException, InterruptedException {
     // before becoming available, make sure we are not live and active
@@ -1100,6 +1073,50 @@ public final class ZkController {
     return leaderProps;
   }
   
+  public static void linkConfSet(SolrZkClient zkClient, String collection, String confSetName) throws KeeperException, InterruptedException {
+    String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection;
+    if (log.isInfoEnabled()) {
+      log.info("Load collection config from:" + path);
+    }
+    byte[] data;
+    try {
+      data = zkClient.getData(path, null, null, true);
+    } catch (NoNodeException e) {
+      // if there is no node, we will try and create it
+      // first try to make it, in case we are pre-configuring
+      ZkNodeProps props = new ZkNodeProps(CONFIGNAME_PROP, confSetName);
+      try {
+
+        zkClient.makePath(path, ZkStateReader.toJSON(props),
+            CreateMode.PERSISTENT, null, true);
+      } catch (KeeperException e2) {
+        // its okay if the node already exists
+        if (e2.code() != KeeperException.Code.NODEEXISTS) {
+          throw e;
+        }
+        // if we fail creating, setdata
+        // TODO: we should consider using version
+        zkClient.setData(path, ZkStateReader.toJSON(props), true);
+      }
+      return;
+    }
+    // we found existing data, let's update it
+    ZkNodeProps props = null;
+    if(data != null) {
+      props = ZkNodeProps.load(data);
+      Map<String,String> newProps = new HashMap<String,String>();
+      newProps.putAll(props.getProperties());
+      newProps.put(CONFIGNAME_PROP, confSetName);
+      props = new ZkNodeProps(newProps);
+    } else {
+      props = new ZkNodeProps(CONFIGNAME_PROP, confSetName);
+    }
+    
+    // TODO: we should consider using version
+    zkClient.setData(path, ZkStateReader.toJSON(props), true);
+
+  }
+  
   /**
    * If in SolrCloud mode, upload config sets for each SolrCore in solr.xml.
    * 
@@ -1114,18 +1131,20 @@ public final class ZkController {
 
     for (int i=0; i<nodes.getLength(); i++) {
       Node node = nodes.item(i);
-      String rawName = DOMUtil.getAttr(node, "name", null);
+      String rawName = DOMUtil.substituteProperty(DOMUtil.getAttr(node, "name", null), new Properties());
+
       String instanceDir = DOMUtil.getAttr(node, "instanceDir", null);
       File idir = new File(instanceDir);
       if (!idir.isAbsolute()) {
         idir = new File(solrHome, instanceDir);
       }
-      String confName = DOMUtil.getAttr(node, "collection", null);
+      String confName = DOMUtil.substituteProperty(DOMUtil.getAttr(node, "collection", null), new Properties());
       if (confName == null) {
         confName = rawName;
       }
-
-      ZkController.uploadConfigDir(zkClient, new File(idir, "conf"), confName);
+      File udir = new File(idir, "conf");
+      SolrException.log(log, "Uploading directory " + udir + " with name " + confName + " for SolrCore " + rawName);
+      ZkController.uploadConfigDir(zkClient, udir, confName);
     }
   }
 

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java Fri Jul 20 01:01:39 2012
@@ -19,7 +19,9 @@ package org.apache.solr.core;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 
@@ -54,6 +56,46 @@ public abstract class CachingDirectoryFa
   
   protected Map<Directory,CacheValue> byDirectoryCache = new HashMap<Directory,CacheValue>();
   
+  protected Map<Directory,List<CloseListener>> closeListeners = new HashMap<Directory,List<CloseListener>>();
+  
+  public interface CloseListener {
+    public void onClose();
+  }
+  
+  @Override
+  public void addCloseListener(Directory dir, CloseListener closeListener) {
+    synchronized (this) {
+      if (!byDirectoryCache.containsKey(dir)) {
+        throw new IllegalArgumentException("Unknown directory: " + dir
+            + " " + byDirectoryCache);
+      }
+      List<CloseListener> listeners = closeListeners.get(dir);
+      if (listeners == null) {
+        listeners = new ArrayList<CloseListener>();
+        closeListeners.put(dir, listeners);
+      }
+      listeners.add(closeListener);
+      
+      closeListeners.put(dir, listeners);
+    }
+  }
+  
+  @Override
+  public void doneWithDirectory(Directory directory) throws IOException {
+    synchronized (this) {
+      CacheValue cacheValue = byDirectoryCache.get(directory);
+      if (cacheValue == null) {
+        throw new IllegalArgumentException("Unknown directory: " + directory
+            + " " + byDirectoryCache);
+      }
+      cacheValue.doneWithDir = true;
+      if (cacheValue.refCnt == 0) {
+        cacheValue.refCnt++; // this will go back to 0 in close
+        close(directory);
+      }
+    }
+  }
+  
   /*
    * (non-Javadoc)
    * 
@@ -82,6 +124,13 @@ public abstract class CachingDirectoryFa
         directory.close();
         byDirectoryCache.remove(directory);
         byPathCache.remove(cacheValue.path);
+        List<CloseListener> listeners = closeListeners.remove(directory);
+        if (listeners != null) {
+          for (CloseListener listener : listeners) {
+            listener.onClose();
+          }
+          closeListeners.remove(directory);
+        }
       }
     }
   }

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java Fri Jul 20 01:01:39 2012
@@ -90,12 +90,24 @@ public class CoreContainer 
 {
   private static final String DEFAULT_HOST_CONTEXT = "solr";
   private static final String DEFAULT_HOST_PORT = "8983";
-  private static final int DEFAULT_ZK_CLIENT_TIMEOUT = 10000;
+  private static final int DEFAULT_ZK_CLIENT_TIMEOUT = 15000;
   public static final String DEFAULT_DEFAULT_CORE_NAME = "collection1";
   private static final boolean DEFAULT_SHARE_SCHEMA = false;
   
   protected static Logger log = LoggerFactory.getLogger(CoreContainer.class);
   
+  // solr.xml node constants
+  private static final String CORE_NAME = "name";
+  private static final String CORE_CONFIG = "config";
+  private static final String CORE_INSTDIR = "instanceDir";
+  private static final String CORE_DATADIR = "dataDir";
+  private static final String CORE_SCHEMA = "schema";
+  private static final String CORE_SHARD = "shard";
+  private static final String CORE_COLLECTION = "collection";
+  private static final String CORE_ROLES = "roles";
+  private static final String CORE_PROPERTIES = "properties";
+
+
   protected final Map<String, SolrCore> cores = new LinkedHashMap<String, SolrCore>();
   protected boolean persistent = false;
   protected String adminPath = null;
@@ -477,43 +489,43 @@ public class CoreContainer 
     for (int i=0; i<nodes.getLength(); i++) {
       Node node = nodes.item(i);
       try {
-        String rawName = DOMUtil.getAttr(node, "name", null);
+        String rawName = DOMUtil.getAttr(node, CORE_NAME, null);
         if (null == rawName) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                                   "Each core in solr.xml must have a 'name'");
         }
         String name = rawName;
-        CoreDescriptor p = new CoreDescriptor(this, name, DOMUtil.getAttr(node, "instanceDir", null));
+        CoreDescriptor p = new CoreDescriptor(this, name, DOMUtil.getAttr(node, CORE_INSTDIR, null));
 
         // deal with optional settings
-        String opt = DOMUtil.getAttr(node, "config", null);
+        String opt = DOMUtil.getAttr(node, CORE_CONFIG, null);
 
         if (opt != null) {
           p.setConfigName(opt);
         }
-        opt = DOMUtil.getAttr(node, "schema", null);
+        opt = DOMUtil.getAttr(node, CORE_SCHEMA, null);
         if (opt != null) {
           p.setSchemaName(opt);
         }
         if (zkController != null) {
-          opt = DOMUtil.getAttr(node, "shard", null);
+          opt = DOMUtil.getAttr(node, CORE_SHARD, null);
           if (opt != null && opt.length() > 0) {
             p.getCloudDescriptor().setShardId(opt);
           }
-          opt = DOMUtil.getAttr(node, "collection", null);
+          opt = DOMUtil.getAttr(node, CORE_COLLECTION, null);
           if (opt != null) {
             p.getCloudDescriptor().setCollectionName(opt);
           }
-          opt = DOMUtil.getAttr(node, "roles", null);
+          opt = DOMUtil.getAttr(node, CORE_ROLES, null);
           if(opt != null){
         	  p.getCloudDescriptor().setRoles(opt);
           }
         }
-        opt = DOMUtil.getAttr(node, "properties", null);
+        opt = DOMUtil.getAttr(node, CORE_PROPERTIES, null);
         if (opt != null) {
           p.setPropertiesName(opt);
         }
-        opt = DOMUtil.getAttr(node, CoreAdminParams.DATA_DIR, null);
+        opt = DOMUtil.getAttr(node, CORE_DATADIR, null);
         if (opt != null) {
           p.setDataDir(opt);
         }
@@ -565,6 +577,8 @@ public class CoreContainer 
    */
   public void shutdown() {
     log.info("Shutting down CoreContainer instance="+System.identityHashCode(this));
+    isShutDown = true;
+    
     if (isZooKeeperAware()) {
       cancelCoreRecoveries();
     }
@@ -589,7 +603,6 @@ public class CoreContainer 
         if (shardHandlerFactory != null) {
           shardHandlerFactory.close();
         }
-        isShutDown = true;
       }
     }
   }
@@ -1153,7 +1166,7 @@ public class CoreContainer 
           for (int i = 0; i < nodes.getLength(); i++) {
             Node node = nodes.item(i);
             
-            String name = DOMUtil.getAttr(node, "name", null);
+            String name = DOMUtil.getAttr(node, CORE_NAME, null);
             if (origCoreName.equals(name)) {
               coreNode = node;
               if (coreName.equals(origCoreName)) {
@@ -1167,7 +1180,7 @@ public class CoreContainer 
             // see if we match with substitution
             for (int i = 0; i < nodes.getLength(); i++) {
               Node node = nodes.item(i);
-              String name = DOMUtil.getAttr(node, "name", null);
+              String name = DOMUtil.getAttr(node, CORE_NAME, null);
               if (origCoreName.equals(DOMUtil.substituteProperty(name,
                   loader.getCoreProperties()))) {
                 coreNode = node;
@@ -1180,28 +1193,28 @@ public class CoreContainer 
           }
         }
 
-        coreAttribs.put("name", coreName);
+        coreAttribs.put(CORE_NAME, coreName);
         
         String instanceDir = dcore.getInstanceDir();
-        addCoreProperty(coreAttribs, coreNode, "instanceDir", instanceDir, null);
+        addCoreProperty(coreAttribs, coreNode, CORE_INSTDIR, instanceDir, null);
         
         // write config 
         String configName = dcore.getConfigName();
-        addCoreProperty(coreAttribs, coreNode, "conf", configName, dcore.getDefaultConfigName());
+        addCoreProperty(coreAttribs, coreNode, CORE_CONFIG, configName, dcore.getDefaultConfigName());
         
         // write schema
         String schema = dcore.getSchemaName();
-        addCoreProperty(coreAttribs, coreNode, "schema", schema, dcore.getDefaultSchemaName());
+        addCoreProperty(coreAttribs, coreNode, CORE_SCHEMA, schema, dcore.getDefaultSchemaName());
         
         String dataDir = dcore.dataDir;
-        addCoreProperty(coreAttribs, coreNode, "dataDir", dataDir, null);
+        addCoreProperty(coreAttribs, coreNode, CORE_DATADIR, dataDir, null);
         
         CloudDescriptor cd = dcore.getCloudDescriptor();
         String shard = null;
         if (cd != null) {
           shard = cd.getShardId();
         }
-        addCoreProperty(coreAttribs, coreNode, "shard", shard, null);
+        addCoreProperty(coreAttribs, coreNode, CORE_SHARD, shard, null);
         
         String collection = null;
         // only write out the collection name if it's not the default (the
@@ -1211,12 +1224,12 @@ public class CoreContainer 
           collection = cd.getCollectionName();
         }
         
-        addCoreProperty(coreAttribs, coreNode, "collection", collection, dcore.name);
+        addCoreProperty(coreAttribs, coreNode, CORE_COLLECTION, collection, dcore.name);
         
         // we don't try and preserve sys prop defs in these
         String opt = dcore.getPropertiesName();
         if (opt != null) {
-          coreAttribs.put("properties", opt);
+          coreAttribs.put(CORE_PROPERTIES, opt);
         }
         
         SolrCoreXMLDef solrCoreXMLDef = new SolrCoreXMLDef();

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java Fri Jul 20 01:01:39 2012
@@ -21,6 +21,7 @@ import java.io.Closeable;
 import java.io.IOException;
 
 import org.apache.lucene.store.Directory;
+import org.apache.solr.core.CachingDirectoryFactory.CloseListener;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
 /**
@@ -31,6 +32,24 @@ public abstract class DirectoryFactory i
     Closeable {
   
   /**
+   * Indicates a Directory will no longer be used, and when its ref count
+   * hits 0, it can be closed. On shutdown all directories will be closed
+   * whether this has been called or not. This is simply to allow early cleanup.
+   * 
+   * @param directory
+   * @throws IOException 
+   */
+  public abstract void doneWithDirectory(Directory directory) throws IOException;
+  
+  /**
+   * Adds a close listener for a Directory.
+   * 
+   * @param dir
+   * @param closeListener
+   */
+  public abstract void addCloseListener(Directory dir, CloseListener closeListener);
+  
+  /**
   * Close this and all of the Directories it contains.
    * 
    * @throws IOException
@@ -62,7 +81,9 @@ public abstract class DirectoryFactory i
   /**
    * Returns the Directory for a given path, using the specified rawLockType.
    * Will return the same Directory instance for the same path unless forceNew,
-   * in which case a new Directory is returned.
+   * in which case a new Directory is returned. There is no need to call
+   * {@link #doneWithDirectory(Directory)} in this case - the old Directory
+   * will be closed when its ref count hits 0.
    * 
    * @throws IOException
    */
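
These two hooks are used together later in this commit (see the SnapPuller change below): a caller registers a CloseListener for cleanup that must wait until the Directory's last reference is released, then calls doneWithDirectory() once it no longer needs the Directory itself. A minimal sketch of the pattern, assuming a SolrCore named core and a Directory dir obtained from its CachingDirectoryFactory:

    // Cleanup that must not run while anything still holds the Directory open.
    core.getDirectoryFactory().addCloseListener(dir, new CloseListener() {
      @Override
      public void onClose() {
        // safe to delete the underlying files, invalidate caches, etc.
      }
    });
    // Signal that this Directory is no longer needed; it is closed (and the
    // listener fired) as soon as its ref count reaches 0.
    core.getDirectoryFactory().doneWithDirectory(dir);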

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/SolrCore.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/SolrCore.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/SolrCore.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/SolrCore.java Fri Jul 20 01:01:39 2012
@@ -664,7 +664,8 @@ public final class SolrCore implements S
       latch.countDown();//release the latch, otherwise we block trying to do the close.  This should be fine, since counting down on a latch of 0 is still fine
       //close down the searcher and any other resources, if it exists, as this is not recoverable
       close();
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, null, e);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, 
+                              e.getMessage(), e);
     } finally {
       // allow firstSearcher events to fire and make sure it is released
       latch.countDown();
@@ -1179,8 +1180,12 @@ public final class SolrCore implements S
 
         if (updateHandlerReopens) {
           // SolrCore.verbose("start reopen from",previousSearcher,"writer=",writer);
-          IndexWriter writer = getUpdateHandler().getSolrCoreState().getIndexWriter(this);
-          newReader = DirectoryReader.openIfChanged(currentReader, writer, true);
+          RefCounted<IndexWriter> writer = getUpdateHandler().getSolrCoreState().getIndexWriter(this);
+          try {
+            newReader = DirectoryReader.openIfChanged(currentReader, writer.get(), true);
+          } finally {
+            writer.decref();
+          }
 
         } else {
           // verbose("start reopen without writer, reader=", currentReader);
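
getIndexWriter() now hands out the writer behind a RefCounted wrapper, so every caller (SolrCore above, SnapPuller below) follows the same acquire/release discipline instead of holding a raw IndexWriter that might be swapped underneath it. A minimal sketch of the pattern, assuming a SolrCore named core:

    RefCounted<IndexWriter> iwRef =
        core.getUpdateHandler().getSolrCoreState().getIndexWriter(core);
    try {
      IndexWriter iw = iwRef.get();
      // ... use the writer here; do not close it, the core state owns it ...
    } finally {
      iwRef.decref();   // always release, even if the work above throws
    }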

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java Fri Jul 20 01:01:39 2012
@@ -18,8 +18,6 @@
 package org.apache.solr.handler;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.CharReader;
-import org.apache.lucene.analysis.CharStream;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.*;
 import org.apache.lucene.analysis.util.CharFilterFactory;
@@ -41,6 +39,7 @@ import org.apache.solr.response.SolrQuer
 import org.apache.solr.schema.FieldType;
 
 import java.io.IOException;
+import java.io.Reader;
 import java.io.StringReader;
 import java.util.*;
 import org.apache.commons.lang.ArrayUtils;
@@ -106,7 +105,7 @@ public abstract class AnalysisRequestHan
     if( cfiltfacs != null ){
       String source = value;
       for(CharFilterFactory cfiltfac : cfiltfacs ){
-        CharStream reader = CharReader.get(new StringReader(source));
+        Reader reader = new StringReader(source);
         reader = cfiltfac.create(reader);
         source = writeCharStream(namedList, reader);
       }
@@ -287,7 +286,7 @@ public abstract class AnalysisRequestHan
     return tokensNamedLists;
   }
   
-  private String writeCharStream(NamedList<Object> out, CharStream input ){
+  private String writeCharStream(NamedList<Object> out, Reader input ){
     final int BUFFER_SIZE = 1024;
     char[] buf = new char[BUFFER_SIZE];
     int len = 0;
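
With CharStream/CharReader removed, char filters are plain java.io.Reader decorators, so the analysis handler can wrap a StringReader directly. A short sketch of the simplified chaining, with the list of factories left as an assumption:

    Reader source = new StringReader(text);
    for (CharFilterFactory factory : charFilterFactories) {
      // each create() now accepts and returns a plain Reader
      source = factory.create(source);
    }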

Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/SnapPuller.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/SnapPuller.java?rev=1363608&r1=1363607&r2=1363608&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/SnapPuller.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/handler/SnapPuller.java Fri Jul 20 01:01:39 2012
@@ -19,6 +19,8 @@ package org.apache.solr.handler;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.HttpClient;
 import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
@@ -31,6 +33,7 @@ import org.apache.solr.common.params.Sol
 import org.apache.solr.common.util.FastInputStream;
 import org.apache.solr.util.FileUtils;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CachingDirectoryFactory.CloseListener;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.IndexDeletionPolicyWrapper;
 import static org.apache.solr.handler.ReplicationHandler.*;
@@ -276,7 +279,12 @@ public class SnapPuller {
         if (force && commit.getGeneration() != 0) {
           // since we won't get the files for an empty index,
           // we just clear ours and commit
-          core.getUpdateHandler().getSolrCoreState().getIndexWriter(core).deleteAll();
+          RefCounted<IndexWriter> iw = core.getUpdateHandler().getSolrCoreState().getIndexWriter(core);
+          try {
+            iw.get().deleteAll();
+          } finally {
+            iw.decref();
+          }
           SolrQueryRequest req = new LocalSolrQueryRequest(core,
               new ModifiableSolrParams());
           core.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
@@ -315,9 +323,10 @@ public class SnapPuller {
       LOG.info("Starting download to " + tmpIndexDir + " fullCopy=" + isFullCopyNeeded);
       successfulInstall = false;
       boolean deleteTmpIdxDir = true;
-      File indexDir = null ;
+
+      final File indexDir = new File(core.getIndexDir());
+      Directory oldDirectory = null;
       try {
-        indexDir = new File(core.getIndexDir());
         downloadIndexFiles(isFullCopyNeeded, tmpIndexDir, latestGeneration);
         LOG.info("Total time taken for download : " + ((System.currentTimeMillis() - replicationStartTime) / 1000) + " secs");
         Collection<Map<String, Object>> modifiedConfFiles = getModifiedConfFiles(confFilesToDownload);
@@ -339,14 +348,42 @@ public class SnapPuller {
           if (isFullCopyNeeded) {
             successfulInstall = modifyIndexProps(tmpIndexDir.getName());
             deleteTmpIdxDir =  false;
+            RefCounted<IndexWriter> iw = core.getUpdateHandler().getSolrCoreState().getIndexWriter(core);
+            try {
+               oldDirectory = iw.get().getDirectory();
+            } finally {
+              iw.decref();
+            }
           } else {
             successfulInstall = copyIndexFiles(tmpIndexDir, indexDir);
           }
           if (successfulInstall) {
             logReplicationTimeAndConfFiles(modifiedConfFiles, successfulInstall);
-            doCommit();
           }
         }
+        
+        if (isFullCopyNeeded) {
+          // we have to do this before commit
+          core.getDirectoryFactory().addCloseListener(oldDirectory, new CloseListener(){
+
+            @Override
+            public void onClose() {
+              LOG.info("removing old index directory " + indexDir);
+              delTree(indexDir);
+            }
+            
+          });
+        }
+        
+        if (successfulInstall) {
+          if (isFullCopyNeeded) {
+            // let the system know we are changing dirs and the old one
+            // may be closed
+            core.getDirectoryFactory().doneWithDirectory(oldDirectory);
+          }
+          doCommit();
+        }
+        
         replicationStartTime = 0;
         return successfulInstall;
       } catch (ReplicationHandlerException e) {
@@ -362,10 +399,7 @@ public class SnapPuller {
         if (deleteTmpIdxDir) {
           LOG.info("removing temporary index download directory " + tmpIndexDir);
           delTree(tmpIndexDir);
-        } else {
-          LOG.info("removing old index directory " + indexDir);
-          delTree(indexDir);
-        }
+        } 
       }
     } finally {
       if (!successfulInstall) {