You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by sa...@apache.org on 2013/05/22 17:51:12 UTC
svn commit: r1485259 [4/5] - in /lucene/dev/branches/lucene4956: ./
dev-tools/ dev-tools/idea/.idea/ dev-tools/idea/.idea/libraries/
dev-tools/idea/lucene/replicator/ dev-tools/maven/ dev-tools/maven/lucene/
dev-tools/maven/lucene/replicator/ dev-tools...
Modified: lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java (original)
+++ lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java Wed May 22 15:51:08 2013
@@ -60,7 +60,6 @@ import org.apache.lucene.util._TestUtil;
* refusing to write/delete to open files.
* </ul>
*/
-
public class MockDirectoryWrapper extends BaseDirectoryWrapper {
long maxSize;
@@ -159,6 +158,26 @@ public class MockDirectoryWrapper extend
this.throttling = throttling;
}
+ /**
+ * Returns true if {@link #getDelegate() delegate} must sync its files.
+ * Currently, only {@link NRTCachingDirectory} requires sync'ing its files
+ * because otherwise they are cached in an internal {@link RAMDirectory}. If
+ * other directories require that too, they should be added to this method.
+ */
+ private boolean mustSync() {
+ Directory delegate = this.delegate;
+ while (true) {
+ if (delegate instanceof RateLimitedDirectoryWrapper) {
+ delegate = ((RateLimitedDirectoryWrapper) delegate).getDelegate();
+ } else if (delegate instanceof TrackingDirectoryWrapper) {
+ delegate = ((TrackingDirectoryWrapper) delegate).getDelegate();
+ } else {
+ break;
+ }
+ }
+ return delegate instanceof NRTCachingDirectory;
+ }
+
@Override
public synchronized void sync(Collection<String> names) throws IOException {
maybeYield();
@@ -166,12 +185,16 @@ public class MockDirectoryWrapper extend
if (crashed) {
throw new IOException("cannot sync after crash");
}
- unSyncedFiles.removeAll(names);
- // TODO: need to improve hack to be OK w/
- // RateLimitingDirWrapper in between...
- if (true || LuceneTestCase.rarely(randomState) || delegate instanceof NRTCachingDirectory) {
- // don't wear out our hardware so much in tests.
- delegate.sync(names);
+ // don't wear out our hardware so much in tests.
+ if (LuceneTestCase.rarely(randomState) || mustSync()) {
+ for (String name : names) {
+ // randomly fail with IOE on any file
+ maybeThrowIOException(name);
+ delegate.sync(Collections.singleton(name));
+ unSyncedFiles.remove(name);
+ }
+ } else {
+ unSyncedFiles.removeAll(names);
}
}
@@ -343,30 +366,26 @@ public class MockDirectoryWrapper extend
return randomIOExceptionRateOnOpen;
}
- void maybeThrowIOException() throws IOException {
- maybeThrowIOException(null);
- }
-
void maybeThrowIOException(String message) throws IOException {
if (randomState.nextDouble() < randomIOExceptionRate) {
if (LuceneTestCase.VERBOSE) {
System.out.println(Thread.currentThread().getName() + ": MockDirectoryWrapper: now throw random exception" + (message == null ? "" : " (" + message + ")"));
new Throwable().printStackTrace(System.out);
}
- throw new IOException("a random IOException" + (message == null ? "" : "(" + message + ")"));
+ throw new IOException("a random IOException" + (message == null ? "" : " (" + message + ")"));
}
}
- void maybeThrowIOExceptionOnOpen() throws IOException {
+ void maybeThrowIOExceptionOnOpen(String name) throws IOException {
if (randomState.nextDouble() < randomIOExceptionRateOnOpen) {
if (LuceneTestCase.VERBOSE) {
- System.out.println(Thread.currentThread().getName() + ": MockDirectoryWrapper: now throw random exception during open");
+ System.out.println(Thread.currentThread().getName() + ": MockDirectoryWrapper: now throw random exception during open file=" + name);
new Throwable().printStackTrace(System.out);
}
if (randomState.nextBoolean()) {
- throw new IOException("a random IOException");
+ throw new IOException("a random IOException (" + name + ")");
} else {
- throw new FileNotFoundException("a random IOException");
+ throw new FileNotFoundException("a random IOException (" + name + ")");
}
}
}
@@ -432,7 +451,7 @@ public class MockDirectoryWrapper extend
@Override
public synchronized IndexOutput createOutput(String name, IOContext context) throws IOException {
maybeThrowDeterministicException();
- maybeThrowIOExceptionOnOpen();
+ maybeThrowIOExceptionOnOpen(name);
maybeYield();
if (failOnCreateOutput) {
maybeThrowDeterministicException();
@@ -486,7 +505,7 @@ public class MockDirectoryWrapper extend
if (throttling == Throttling.ALWAYS ||
(throttling == Throttling.SOMETIMES && randomState.nextInt(50) == 0) && !(delegate instanceof RateLimitedDirectoryWrapper)) {
if (LuceneTestCase.VERBOSE) {
- System.out.println("MockDirectoryWrapper: throttling indexOutput");
+ System.out.println("MockDirectoryWrapper: throttling indexOutput (" + name + ")");
}
return throttledOutput.newFromDelegate(io);
} else {
@@ -519,7 +538,7 @@ public class MockDirectoryWrapper extend
@Override
public synchronized IndexInput openInput(String name, IOContext context) throws IOException {
maybeThrowDeterministicException();
- maybeThrowIOExceptionOnOpen();
+ maybeThrowIOExceptionOnOpen(name);
maybeYield();
if (failOnOpenInput) {
maybeThrowDeterministicException();
@@ -632,7 +651,7 @@ public class MockDirectoryWrapper extend
if (LuceneTestCase.VERBOSE) {
System.out.println("\nNOTE: MockDirectoryWrapper: now crash");
}
- crash(); // corrumpt any unsynced-files
+ crash(); // corrupt any unsynced-files
if (LuceneTestCase.VERBOSE) {
System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
}
Modified: lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java (original)
+++ lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java Wed May 22 15:51:08 2013
@@ -43,6 +43,50 @@ public class MockIndexOutputWrapper exte
this.delegate = delegate;
}
+ private void checkCrashed() throws IOException {
+ // If MockRAMDir crashed since we were opened, then don't write anything
+ if (dir.crashed) {
+ throw new IOException("MockRAMDirectory was crashed; cannot write to " + name);
+ }
+ }
+
+ private void checkDiskFull(byte[] b, int offset, DataInput in, long len) throws IOException {
+ long freeSpace = dir.maxSize == 0 ? 0 : dir.maxSize - dir.sizeInBytes();
+ long realUsage = 0;
+
+ // Enforce disk full:
+ if (dir.maxSize != 0 && freeSpace <= len) {
+ // Compute the real disk free. This will greatly slow
+ // down our test but makes it more accurate:
+ realUsage = dir.getRecomputedActualSizeInBytes();
+ freeSpace = dir.maxSize - realUsage;
+ }
+
+ if (dir.maxSize != 0 && freeSpace <= len) {
+ if (freeSpace > 0) {
+ realUsage += freeSpace;
+ if (b != null) {
+ delegate.writeBytes(b, offset, (int) freeSpace);
+ } else {
+ delegate.copyBytes(in, len);
+ }
+ }
+ if (realUsage > dir.maxUsedSize) {
+ dir.maxUsedSize = realUsage;
+ }
+ String message = "fake disk full at " + dir.getRecomputedActualSizeInBytes() + " bytes when writing " + name + " (file length=" + delegate.length();
+ if (freeSpace > 0) {
+ message += "; wrote " + freeSpace + " of " + len + " bytes";
+ }
+ message += ")";
+ if (LuceneTestCase.VERBOSE) {
+ System.out.println(Thread.currentThread().getName() + ": MDW: now throw fake disk full");
+ new Throwable().printStackTrace(System.out);
+ }
+ throw new IOException(message);
+ }
+ }
+
@Override
public void close() throws IOException {
try {
@@ -75,48 +119,16 @@ public class MockIndexOutputWrapper exte
@Override
public void writeBytes(byte[] b, int offset, int len) throws IOException {
- long freeSpace = dir.maxSize == 0 ? 0 : dir.maxSize - dir.sizeInBytes();
- long realUsage = 0;
- // If MockRAMDir crashed since we were opened, then
- // don't write anything:
- if (dir.crashed)
- throw new IOException("MockRAMDirectory was crashed; cannot write to " + name);
-
- // Enforce disk full:
- if (dir.maxSize != 0 && freeSpace <= len) {
- // Compute the real disk free. This will greatly slow
- // down our test but makes it more accurate:
- realUsage = dir.getRecomputedActualSizeInBytes();
- freeSpace = dir.maxSize - realUsage;
- }
-
- if (dir.maxSize != 0 && freeSpace <= len) {
- if (freeSpace > 0) {
- realUsage += freeSpace;
- delegate.writeBytes(b, offset, (int) freeSpace);
- }
- if (realUsage > dir.maxUsedSize) {
- dir.maxUsedSize = realUsage;
- }
- String message = "fake disk full at " + dir.getRecomputedActualSizeInBytes() + " bytes when writing " + name + " (file length=" + delegate.length();
- if (freeSpace > 0) {
- message += "; wrote " + freeSpace + " of " + len + " bytes";
- }
- message += ")";
- if (LuceneTestCase.VERBOSE) {
- System.out.println(Thread.currentThread().getName() + ": MDW: now throw fake disk full");
- new Throwable().printStackTrace(System.out);
- }
- throw new IOException(message);
+ checkCrashed();
+ checkDiskFull(b, offset, null, len);
+
+ if (dir.randomState.nextInt(200) == 0) {
+ final int half = len/2;
+ delegate.writeBytes(b, offset, half);
+ Thread.yield();
+ delegate.writeBytes(b, offset+half, len-half);
} else {
- if (dir.randomState.nextInt(200) == 0) {
- final int half = len/2;
- delegate.writeBytes(b, offset, half);
- Thread.yield();
- delegate.writeBytes(b, offset+half, len-half);
- } else {
- delegate.writeBytes(b, offset, len);
- }
+ delegate.writeBytes(b, offset, len);
}
dir.maybeThrowDeterministicException();
@@ -146,8 +158,10 @@ public class MockIndexOutputWrapper exte
@Override
public void copyBytes(DataInput input, long numBytes) throws IOException {
+ checkCrashed();
+ checkDiskFull(null, 0, input, numBytes);
+
delegate.copyBytes(input, numBytes);
- // TODO: we may need to check disk full here as well
dir.maybeThrowDeterministicException();
}
Modified: lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/branches/lucene4956/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java Wed May 22 15:51:08 2013
@@ -23,6 +23,7 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Logger;
import org.apache.lucene.analysis.Analyzer;
@@ -148,10 +149,10 @@ public abstract class LuceneTestCase ext
public static final String SYSPROP_BADAPPLES = "tests.badapples";
/** @see #ignoreAfterMaxFailures*/
- private static final String SYSPROP_MAXFAILURES = "tests.maxfailures";
+ public static final String SYSPROP_MAXFAILURES = "tests.maxfailures";
/** @see #ignoreAfterMaxFailures*/
- private static final String SYSPROP_FAILFAST = "tests.failfast";
+ public static final String SYSPROP_FAILFAST = "tests.failfast";
/**
* Annotation for tests that should only be run during nightly builds.
@@ -356,9 +357,17 @@ public abstract class LuceneTestCase ext
new TestRuleMarkFailure();
/**
- * Ignore tests after hitting a designated number of initial failures.
+ * Ignore tests after hitting a designated number of initial failures. This
+ * is truly a "static" global singleton since it needs to span the lifetime of all
+ * test classes running inside this JVM (it cannot be part of a class rule).
+ *
+ * <p>This poses some problems for the test framework's tests because these sometimes
+ * trigger intentional failures which add up to the global count. This field contains
+ * a (possibly) changing reference to {@link TestRuleIgnoreAfterMaxFailures} and we
+ * dispatch to its current value from the {@link #classRules} chain using {@link TestRuleDelegate}.
*/
- final static TestRuleIgnoreAfterMaxFailures ignoreAfterMaxFailures;
+ private static final AtomicReference<TestRuleIgnoreAfterMaxFailures> ignoreAfterMaxFailuresDelegate;
+ private static final TestRule ignoreAfterMaxFailures;
static {
int maxFailures = systemPropertyAsInt(SYSPROP_MAXFAILURES, Integer.MAX_VALUE);
boolean failFast = systemPropertyAsBoolean(SYSPROP_FAILFAST, false);
@@ -373,7 +382,19 @@ public abstract class LuceneTestCase ext
}
}
- ignoreAfterMaxFailures = new TestRuleIgnoreAfterMaxFailures(maxFailures);
+ ignoreAfterMaxFailuresDelegate =
+ new AtomicReference<TestRuleIgnoreAfterMaxFailures>(
+ new TestRuleIgnoreAfterMaxFailures(maxFailures));
+ ignoreAfterMaxFailures = TestRuleDelegate.of(ignoreAfterMaxFailuresDelegate);
+ }
+
+ /**
+ * Temporarily substitute the global {@link TestRuleIgnoreAfterMaxFailures}. See
+ * {@link #ignoreAfterMaxFailuresDelegate} for some explanation why this method
+ * is needed.
+ */
+ public static TestRuleIgnoreAfterMaxFailures replaceMaxFailureRule(TestRuleIgnoreAfterMaxFailures newValue) {
+ return ignoreAfterMaxFailuresDelegate.getAndSet(newValue);
}
/**
Modified: lucene/dev/branches/lucene4956/lucene/tools/junit4/tests.policy
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/lucene/tools/junit4/tests.policy?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/lucene/tools/junit4/tests.policy (original)
+++ lucene/dev/branches/lucene4956/lucene/tools/junit4/tests.policy Wed May 22 15:51:08 2013
@@ -54,6 +54,7 @@ grant {
// Solr needs those:
permission java.net.NetPermission "*";
+ permission java.sql.SQLPermission "*";
permission java.util.logging.LoggingPermission "control";
permission javax.management.MBeanPermission "*", "*";
permission javax.management.MBeanServerPermission "*";
Modified: lucene/dev/branches/lucene4956/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/CHANGES.txt?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/CHANGES.txt (original)
+++ lucene/dev/branches/lucene4956/solr/CHANGES.txt Wed May 22 15:51:08 2013
@@ -38,6 +38,12 @@ TBD...
Detailed Change List
----------------------
+Other Changes
+----------------------
+
+* SOLR-4622: Hardcoded SolrCloud defaults for hostContext and hostPort that
+ were deprecated in 4.3 have been removed completely. (hossman)
+
================== 4.4.0 ==================
Versions of Major Components
@@ -54,7 +60,7 @@ Upgrading from Solr 4.3.0
* SOLR-4778: The signature of LogWatcher.registerListener has changed, from
(ListenerConfig, CoreContainer) to (ListenerConfig). Users implementing their
own LogWatcher classes will need to change their code accordingly.
-
+
Detailed Change List
----------------------
@@ -66,23 +72,38 @@ New Features
* SOLR-4761: Add option to plugin a merged segment warmer into solrconfig.xml
(Mark Miller, Mike McCandless, Robert Muir)
+* SOLR-3240: Add "spellcheck.collateMaxCollectDocs" option so that when testing
+ potential Collations against the index, SpellCheckComponent will only collect
+ n documents, thereby estimating the hit-count. This is a performance optimization
+ in cases where exact hit-counts are unnecessary. Also, when "collateExtendedResults"
+ is false, this optimization is always made (James Dyer).
+
+* SOLR-4785: New MaxScoreQParserPlugin returning max() instead of sum() of terms (janhoy)
+
+* SOLR-4234: Add support for binary files in ZooKeeper. (Eric Pugh via Mark Miller)
+
+* SOLR-4048: Add findRecursive method to NamedList. (Shawn Heisey)
+
Bug Fixes
----------------------
-* SOLR-4741: Deleting a collection should set DELETE_DATA_DIR to true.
- (Mark Miller)
-
-* SOLR-4333: edismax parser to not double-escape colons if already escaped by
+* SOLR-4333: edismax parser to not double-escape colons if already escaped by
the client application (James Dyer, Robert J. van der Boon)
-* SOLR-4752: There are some minor bugs in the Collections API parameter
- validation. (Mark Miller)
-
* SOLR-4776: Solrj doesn't return "between" count in range facets
(Philip K. Warren via shalin)
* SOLR-4616: HitRatio on caches is now exposed over JMX MBeans as a float.
(Greg Bowyer)
+
+* SOLR-4803: Fixed core discovery mode (ie: new style solr.xml) to treat
+ 'collection1' as the default core name. (hossman)
+
+* SOLR-4790: Throw an error if a core has the same name as another core, both old and
+ new style solr.xml
+
+* SOLR-4842: Fix facet.field local params from affecting other facet.field's.
+ (ehatcher, hossman)
Other Changes
----------------------
@@ -110,6 +131,74 @@ Other Changes
* SOLR-4784: Make class LuceneQParser public (janhoy)
+* SOLR-4448: Allow the solr internal load balancer to be more easily pluggable.
+ (Philip Hoy via Robert Muir)
+
+================== 4.3.1 ==================
+
+Versions of Major Components
+---------------------
+Apache Tika 1.3
+Carrot2 3.6.2
+Velocity 1.7 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.4.5
+
+Detailed Change List
+----------------------
+
+Bug Fixes
+----------------------
+
+* SOLR-4795: Sub shard leader should not accept any updates from parent after
+ it goes active (shalin)
+
+* SOLR-4798: shard splitting does not respect the router for the collection
+ when executing the index split. One effect of this is that documents
+ may be placed in the wrong shard when the default compositeId router
+ is used in conjunction with IDs containing "!". (yonik)
+
+* SOLR-4797: Shard splitting creates sub shards which have the wrong hash
+ range in cluster state. This happens when numShards is not a power of two
+ and router is compositeId. (shalin)
+
+* SOLR-4791: solr.xml sharedLib does not work in 4.3.0 (Ryan Ernst, Jan Høydahl via
+ Erick Erickson)
+
+* SOLR-4806: Shard splitting does not abort if WaitForState times out (shalin)
+
+* SOLR-4807: The zkcli script now works with log4j. The zkcli.bat script
+ was broken on Windows in 4.3.0, now it works. (Shawn Heisey)
+
+* SOLR-4813: Fix SynonymFilterFactory to allow init parameters for
+ tokenizer factory used when parsing synonyms file. (Shingo Sasaki, hossman)
+
+* SOLR-4829: Fix transaction log leaks (a failure to clean up some old logs)
+ on a shard leader, or when unexpected exceptions are thrown during log
+ recovery. (Steven Bower, Mark Miller, yonik)
+
+* SOLR-4751: Fix replication problem of files in sub directory of conf directory.
+ (Minoru Osuka via Koji)
+
+* SOLR-4741: Deleting a collection should set DELETE_DATA_DIR to true.
+ (Mark Miller)
+
+* SOLR-4752: There are some minor bugs in the Collections API parameter
+ validation. (Mark Miller)
+
+* SOLR-4563: RSS DIH-example not working (janhoy)
+
+* SOLR-4796: zkcli.sh should honor JAVA_HOME (Roman Shaposhnik via Mark Miller)
+
+* SOLR-4734: Leader election fails with an NPE if there is no UpdateLog.
+ (Mark Miller, Alexander Eibner)
+
+Other Changes
+----------------------
+
+* SOLR-4760: Include core name in logs when loading schema.
+ (Shawn Heisey)
+
================== 4.3.0 ==================
Versions of Major Components
Modified: lucene/dev/branches/lucene4956/solr/NOTICE.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/NOTICE.txt?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/NOTICE.txt (original)
+++ lucene/dev/branches/lucene4956/solr/NOTICE.txt Wed May 22 15:51:08 2013
@@ -69,11 +69,6 @@ Jean-Philippe Barrette-LaPierre. This li
see http://sites.google.com/site/rrettesite/moman and
http://bitbucket.org/jpbarrette/moman/overview/
-The class org.apache.lucene.util.SorterTemplate was inspired by CGLIB's class
-with the same name. The implementation part is mainly done using pre-existing
-Lucene sorting code. In-place stable mergesort was borrowed from CGLIB,
-which is Apache-licensed.
-
The class org.apache.lucene.util.WeakIdentityMap was derived from
the Apache CXF project and is Apache License 2.0.
@@ -151,8 +146,8 @@ LGPL and Creative Commons ShareAlike.
Morfologic includes data from BSD-licensed dictionary of Polish (SGJP)
(http://sgjp.pl/morfeusz/)
-Servlet-api.jar is under the CDDL license, the original source
-code for this can be found at http://www.eclipse.org/jetty/downloads.php
+Servlet-api.jar and javax.servlet-*.jar are under the CDDL license, the original
+source code for this can be found at http://www.eclipse.org/jetty/downloads.php
===========================================================================
Kuromoji Japanese Morphological Analyzer - Apache Lucene Integration
Modified: lucene/dev/branches/lucene4956/solr/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/build.xml?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/build.xml (original)
+++ lucene/dev/branches/lucene4956/solr/build.xml Wed May 22 15:51:08 2013
@@ -564,7 +564,7 @@
<check-missing-javadocs dir="${javadoc.dir}" level="package"/>
</target>
- <target name="-ecj-javadoc-lint" depends="compile,compile-test,jar-test-framework,-ecj-resolve">
+ <target name="-ecj-javadoc-lint" depends="compile,compile-test,jar-test-framework,-ecj-javadoc-lint-unsupported,-ecj-resolve" if="ecj-javadoc-lint.supported">
<subant target="-ecj-javadoc-lint" failonerror="true" inheritall="false">
<propertyset refid="uptodate.and.compiled.properties"/>
<fileset dir="core" includes="build.xml"/>
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java Wed May 22 15:51:08 2013
@@ -172,8 +172,6 @@ final class ShardLeaderElectionContext e
}
log.info("I may be the new leader - try and sync");
-
- UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
// we are going to attempt to be the leader
@@ -187,14 +185,30 @@ final class ShardLeaderElectionContext e
success = false;
}
- if (!success && ulog.getRecentUpdates().getVersions(1).isEmpty()) {
- // we failed sync, but we have no versions - we can't sync in that case
- // - we were active
- // before, so become leader anyway
- log.info("We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway");
- success = true;
+ UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
+
+ if (!success) {
+ boolean hasRecentUpdates = false;
+ if (ulog != null) {
+ // TODO: we could optimize this if necessary
+ UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
+ try {
+ hasRecentUpdates = !recentUpdates.getVersions(1).isEmpty();
+ } finally {
+ recentUpdates.close();
+ }
+ }
+
+ if (!hasRecentUpdates) {
+ // we failed sync, but we have no versions - we can't sync in that case
+ // - we were active
+ // before, so become leader anyway
+ log.info("We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway");
+ success = true;
+ }
}
-
+
+
// if !success but no one else is in active mode,
// we are the leader anyway
// TODO: should we also be leader if there is only one other active?
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java Wed May 22 15:51:08 2013
@@ -320,21 +320,22 @@ public class OverseerCollectionProcessor
private boolean splitShard(ClusterState clusterState, ZkNodeProps message, NamedList results) {
log.info("Split shard invoked");
- String collection = message.getStr("collection");
+ String collectionName = message.getStr("collection");
String slice = message.getStr(ZkStateReader.SHARD_ID_PROP);
- Slice parentSlice = clusterState.getSlice(collection, slice);
+ Slice parentSlice = clusterState.getSlice(collectionName, slice);
if (parentSlice == null) {
- if(clusterState.getCollections().contains(collection)) {
+ if(clusterState.getCollections().contains(collectionName)) {
throw new SolrException(ErrorCode.BAD_REQUEST, "No shard with the specified name exists: " + slice);
} else {
- throw new SolrException(ErrorCode.BAD_REQUEST, "No collection with the specified name exists: " + collection);
+ throw new SolrException(ErrorCode.BAD_REQUEST, "No collection with the specified name exists: " + collectionName);
}
}
// find the leader for the shard
- Replica parentShardLeader = clusterState.getLeader(collection, slice);
-
+ Replica parentShardLeader = clusterState.getLeader(collectionName, slice);
+ DocCollection collection = clusterState.getCollection(collectionName);
+ DocRouter router = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT;
DocRouter.Range range = parentSlice.getRange();
if (range == null) {
range = new PlainIdRouter().fullRange();
@@ -342,8 +343,7 @@ public class OverseerCollectionProcessor
// todo: fixed to two partitions?
// todo: accept the range as a param to api?
- // todo: handle randomizing subshard name in case a shard with the same name already exists.
- List<DocRouter.Range> subRanges = new PlainIdRouter().partitionRange(2, range);
+ List<DocRouter.Range> subRanges = router.partitionRange(2, range);
try {
List<String> subSlices = new ArrayList<String>(subRanges.size());
List<String> subShardNames = new ArrayList<String>(subRanges.size());
@@ -351,10 +351,10 @@ public class OverseerCollectionProcessor
for (int i = 0; i < subRanges.size(); i++) {
String subSlice = slice + "_" + i;
subSlices.add(subSlice);
- String subShardName = collection + "_" + subSlice + "_replica1";
+ String subShardName = collectionName + "_" + subSlice + "_replica1";
subShardNames.add(subShardName);
- Slice oSlice = clusterState.getSlice(collection, subSlice);
+ Slice oSlice = clusterState.getSlice(collectionName, subSlice);
if (oSlice != null) {
if (Slice.ACTIVE.equals(oSlice.getState())) {
throw new SolrException(ErrorCode.BAD_REQUEST, "Sub-shard: " + subSlice + " exists in active state. Aborting split shard.");
@@ -372,13 +372,10 @@ public class OverseerCollectionProcessor
}
}
- ShardResponse srsp;
- do {
- srsp = shardHandler.takeCompletedOrError();
- if (srsp != null) {
- processResponse(results, srsp);
- }
- } while (srsp != null);
+ // do not abort splitshard if the unloading fails
+ // this can happen because the replicas created previously may be down
+ // the only side effect of this is that the sub shard may end up having more replicas than we want
+ collectShardResponses(results, false, null);
for (int i=0; i<subRanges.size(); i++) {
String subSlice = subSlices.get(i);
@@ -386,14 +383,14 @@ public class OverseerCollectionProcessor
DocRouter.Range subRange = subRanges.get(i);
log.info("Creating shard " + subShardName + " as part of slice "
- + subSlice + " of collection " + collection + " on "
+ + subSlice + " of collection " + collectionName + " on "
+ nodeName);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(CoreAdminParams.ACTION, CoreAdminAction.CREATE.toString());
params.set(CoreAdminParams.NAME, subShardName);
- params.set(CoreAdminParams.COLLECTION, collection);
+ params.set(CoreAdminParams.COLLECTION, collectionName);
params.set(CoreAdminParams.SHARD, subSlice);
params.set(CoreAdminParams.SHARD_RANGE, subRange.toString());
params.set(CoreAdminParams.SHARD_STATE, Slice.CONSTRUCTION);
@@ -413,18 +410,14 @@ public class OverseerCollectionProcessor
sendShardRequest(nodeName, new ModifiableSolrParams(cmd.getParams()));
}
- do {
- srsp = shardHandler.takeCompletedOrError();
- if (srsp != null) {
- processResponse(results, srsp);
- }
- } while (srsp != null);
+ collectShardResponses(results, true,
+ "SPLTSHARD failed to create subshard leaders or timed out waiting for them to come up");
log.info("Successfully created all sub-shards for collection "
- + collection + " parent shard: " + slice + " on: " + parentShardLeader);
+ + collectionName + " parent shard: " + slice + " on: " + parentShardLeader);
log.info("Splitting shard " + parentShardLeader.getName() + " as part of slice "
- + slice + " of collection " + collection + " on "
+ + slice + " of collection " + collectionName + " on "
+ parentShardLeader);
ModifiableSolrParams params = new ModifiableSolrParams();
@@ -436,12 +429,7 @@ public class OverseerCollectionProcessor
}
sendShardRequest(parentShardLeader.getNodeName(), params);
- do {
- srsp = shardHandler.takeCompletedOrError();
- if (srsp != null) {
- processResponse(results, srsp);
- }
- } while (srsp != null);
+ collectShardResponses(results, true, "SPLITSHARD failed to invoke SPLIT core admin command");
log.info("Index on shard: " + nodeName + " split into two successfully");
@@ -458,12 +446,8 @@ public class OverseerCollectionProcessor
sendShardRequest(nodeName, params);
}
- do {
- srsp = shardHandler.takeCompletedOrError();
- if (srsp != null) {
- processResponse(results, srsp);
- }
- } while (srsp != null);
+ collectShardResponses(results, true,
+ "SPLITSHARD failed while asking sub shard leaders to apply buffered updates");
log.info("Successfully applied buffered updates on : " + subShardNames);
@@ -474,7 +458,7 @@ public class OverseerCollectionProcessor
// TODO: Have replication factor decided in some other way instead of numShards for the parent
- int repFactor = clusterState.getSlice(collection, slice).getReplicas().size();
+ int repFactor = clusterState.getSlice(collectionName, slice).getReplicas().size();
// we need to look at every node and see how many cores it serves
// add our new cores to existing nodes serving the least number of cores
@@ -501,10 +485,10 @@ public class OverseerCollectionProcessor
String sliceName = subSlices.get(i - 1);
for (int j = 2; j <= repFactor; j++) {
String subShardNodeName = nodeList.get((repFactor * (i - 1) + (j - 2)) % nodeList.size());
- String shardName = collection + "_" + sliceName + "_replica" + (j);
+ String shardName = collectionName + "_" + sliceName + "_replica" + (j);
log.info("Creating replica shard " + shardName + " as part of slice "
- + sliceName + " of collection " + collection + " on "
+ + sliceName + " of collection " + collectionName + " on "
+ subShardNodeName);
// Need to create new params for each request
@@ -512,7 +496,7 @@ public class OverseerCollectionProcessor
params.set(CoreAdminParams.ACTION, CoreAdminAction.CREATE.toString());
params.set(CoreAdminParams.NAME, shardName);
- params.set(CoreAdminParams.COLLECTION, collection);
+ params.set(CoreAdminParams.COLLECTION, collectionName);
params.set(CoreAdminParams.SHARD, sliceName);
// TODO: Figure the config used by the parent shard and use it.
//params.set("collection.configName", configName);
@@ -535,12 +519,9 @@ public class OverseerCollectionProcessor
}
}
- do {
- srsp = shardHandler.takeCompletedOrError();
- if (srsp != null) {
- processResponse(results, srsp);
- }
- } while (srsp != null);
+ collectShardResponses(results, true,
+ "SPLITSHARD failed to create subshard replicas or timed out waiting for them to come up");
+
log.info("Successfully created all replica shards for all sub-slices "
+ subSlices);
@@ -552,7 +533,7 @@ public class OverseerCollectionProcessor
for (String subSlice : subSlices) {
propMap.put(subSlice, Slice.ACTIVE);
}
- propMap.put(ZkStateReader.COLLECTION_PROP, collection);
+ propMap.put(ZkStateReader.COLLECTION_PROP, collectionName);
ZkNodeProps m = new ZkNodeProps(propMap);
inQueue.offer(ZkStateReader.toJSON(m));
@@ -560,11 +541,24 @@ public class OverseerCollectionProcessor
} catch (SolrException e) {
throw e;
} catch (Exception e) {
- log.error("Error executing split operation for collection: " + collection + " parent shard: " + slice, e);
+ log.error("Error executing split operation for collection: " + collectionName + " parent shard: " + slice, e);
throw new SolrException(ErrorCode.SERVER_ERROR, null, e);
}
}
+ private void collectShardResponses(NamedList results, boolean abortOnError, String msgOnError) {
+ ShardResponse srsp;
+ do {
+ srsp = shardHandler.takeCompletedOrError();
+ if (srsp != null) {
+ processResponse(results, srsp);
+ if (abortOnError && srsp.getException() != null) {
+ throw new SolrException(ErrorCode.SERVER_ERROR, msgOnError, srsp.getException());
+ }
+ }
+ } while (srsp != null);
+ }
+
private void sendShardRequest(String nodeName, ModifiableSolrParams params) {
ShardRequest sreq = new ShardRequest();
params.set("qt", adminPath);
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ZkController.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ZkController.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ZkController.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/cloud/ZkController.java Wed May 22 15:51:08 2013
@@ -1251,7 +1251,7 @@ public final class ZkController {
byte[] data = zkClient.getData(zkPath + "/" + file, null, null, true);
dir.mkdirs();
log.info("Write file " + new File(dir, file));
- FileUtils.writeStringToFile(new File(dir, file), new String(data, "UTF-8"), "UTF-8");
+ FileUtils.writeByteArrayToFile(new File(dir, file), data);
} else {
downloadFromZK(zkClient, zkPath + "/" + file, new File(dir, file));
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java Wed May 22 15:51:08 2013
@@ -178,16 +178,19 @@ public class ConfigSolrXmlOld extends Co
log.error(msg);
}
}
-
- if (dataDir != null) {
- if (!dirs.containsKey(dataDir)) {
- dirs.put(dataDir, name);
+
+ String instDir = DOMUtil.getAttr(node, CoreDescriptor.CORE_INSTDIR, null);
+ if (dataDir != null && instDir != null) { // this won't load anyway if instDir not specified.
+
+ String absData = new File(instDir, dataDir).getCanonicalPath();
+ if (!dirs.containsKey(absData)) {
+ dirs.put(absData, name);
} else {
String msg = String
.format(
Locale.ROOT,
"More than one core points to data dir %s. They are in %s and %s",
- dataDir, dirs.get(dataDir), name);
+ absData, dirs.get(absData), name);
log.warn(msg);
}
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreContainer.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreContainer.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreContainer.java Wed May 22 15:51:08 2013
@@ -270,7 +270,7 @@ public class CoreContainer
if (libDir != null) {
File f = FileUtils.resolvePath(new File(dir), libDir);
log.info("loading shared library: " + f.getAbsolutePath());
- loader.addToClassLoader(libDir);
+ loader.addToClassLoader(libDir, null, false);
loader.reloadLuceneSPI();
}
@@ -289,6 +289,7 @@ public class CoreContainer
adminPath = cfg.get(ConfigSolr.CfgProp.SOLR_ADMINPATH, "/admin/cores");
} else {
adminPath = "/admin/cores";
+ defaultCoreName = DEFAULT_DEFAULT_CORE_NAME;
}
zkHost = cfg.get(ConfigSolr.CfgProp.SOLR_ZKHOST, null);
coreLoadThreads = cfg.getInt(ConfigSolr.CfgProp.SOLR_CORELOADTHREADS, CORE_LOAD_THREADS);
@@ -580,6 +581,7 @@ public class CoreContainer
zkSys.close();
}
+ org.apache.lucene.util.IOUtils.closeWhileHandlingException(loader); // best effort
}
public void cancelCoreRecoveries() {
@@ -1113,12 +1115,12 @@ public class CoreContainer
coresAttribs.put("defaultCoreName", defaultCoreName);
}
- addCoresAttrib(coresAttribs, ConfigSolr.CfgProp.SOLR_HOSTPORT, "hostPort",zkSys.getHostPort(), ZkContainer.DEFAULT_HOST_PORT);
+ addCoresAttrib(coresAttribs, ConfigSolr.CfgProp.SOLR_HOSTPORT, "hostPort",zkSys.getHostPort(), null);
addCoresAttrib(coresAttribs, ConfigSolr.CfgProp.SOLR_ZKCLIENTTIMEOUT, "zkClientTimeout",
intToString(this.zkClientTimeout),
Integer.toString(DEFAULT_ZK_CLIENT_TIMEOUT));
addCoresAttrib(coresAttribs, ConfigSolr.CfgProp.SOLR_HOSTCONTEXT, "hostContext",
- zkSys.getHostContext(), ZkContainer.DEFAULT_HOST_CONTEXT);
+ zkSys.getHostContext(), null);
addCoresAttrib(coresAttribs, ConfigSolr.CfgProp.SOLR_LEADERVOTEWAIT, "leaderVoteWait",
zkSys.getLeaderVoteWait(), LEADER_VOTE_WAIT);
addCoresAttrib(coresAttribs, ConfigSolr.CfgProp.SOLR_CORELOADTHREADS, "coreLoadThreads",
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java Wed May 22 15:51:08 2013
@@ -343,22 +343,4 @@ public class CoreDescriptor {
public void putProperty(String prop, String val) {
coreProperties.put(prop, val);
}
-
- // This is particularly useful for checking if any two cores have the same
- // data dir.
- public String getAbsoluteDataDir() {
- String dataDir = getDataDir();
- if (dataDir == null) return null; // No worse than before.
-
- if (new File(dataDir).isAbsolute()) {
- return SolrResourceLoader.normalizeDir(
- SolrResourceLoader.normalizeDir(dataDir));
- }
-
- if (coreContainer == null) return null;
-
- return SolrResourceLoader.normalizeDir(coreContainer.getSolrHome() +
- SolrResourceLoader.normalizeDir(dataDir));
-
- }
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrConfig.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrConfig.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrConfig.java Wed May 22 15:51:08 2013
@@ -19,9 +19,9 @@ package org.apache.solr.core;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.IndexSchemaFactory;
import org.apache.solr.util.DOMUtil;
+import org.apache.solr.util.FileUtils;
import org.apache.solr.util.RegexFileFilter;
import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.SolrRequestHandler;
@@ -51,6 +51,7 @@ import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathConstants;
+import java.io.File;
import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
@@ -453,6 +454,7 @@ public class SolrConfig extends Config {
if (nodes == null || nodes.getLength() == 0) return;
log.info("Adding specified lib dirs to ClassLoader");
+ SolrResourceLoader loader = getResourceLoader();
try {
for (int i = 0; i < nodes.getLength(); i++) {
@@ -464,16 +466,22 @@ public class SolrConfig extends Config {
// :TODO: add support for a simpler 'glob' mutually exclusive of regex
String regex = DOMUtil.getAttr(node, "regex");
FileFilter filter = (null == regex) ? null : new RegexFileFilter(regex);
- getResourceLoader().addToClassLoader(baseDir, filter, false);
+ loader.addToClassLoader(baseDir, filter, false);
} else if (null != path) {
- getResourceLoader().addToClassLoader(path);
+ final File file = FileUtils.resolvePath(new File(loader.getInstanceDir()), path);
+ loader.addToClassLoader(file.getParent(), new FileFilter() {
+ @Override
+ public boolean accept(File pathname) {
+ return pathname.equals(file);
+ }
+ }, false);
} else {
throw new RuntimeException(
"lib: missing mandatory attributes: 'dir' or 'path'");
}
}
} finally {
- getResourceLoader().reloadLuceneSPI();
+ loader.reloadLuceneSPI();
}
}
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrCoreDiscoverer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrCoreDiscoverer.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrCoreDiscoverer.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrCoreDiscoverer.java Wed May 22 15:51:08 2013
@@ -9,6 +9,7 @@ import java.util.Map;
import java.util.Properties;
import org.apache.commons.io.IOUtils;
+import org.apache.solr.common.SolrException;
import org.apache.solr.util.PropertiesUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -89,6 +90,11 @@ public class SolrCoreDiscoverer {
props.setProperty(CoreDescriptor.CORE_NAME, childFile.getName());
}
CoreDescriptor desc = new CoreDescriptor(container, props);
+ CoreDescriptor check = coreDescriptorMap.get(desc.getName());
+ if (check != null) {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core " + desc.getName() +
+ " defined more than once, once in " + desc.getInstanceDir() + " and once in " + check.getInstanceDir());
+ }
coreDescriptorMap.put(desc.getName(), desc);
}
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java Wed May 22 15:51:08 2013
@@ -17,6 +17,7 @@
package org.apache.solr.core;
+import java.io.Closeable;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
@@ -37,6 +38,7 @@ import org.apache.lucene.analysis.util.T
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.util.IOUtils;
import org.apache.lucene.analysis.util.WordlistLoader;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.handler.admin.CoreAdminHandler;
@@ -68,7 +70,7 @@ import org.apache.solr.search.QParserPlu
/**
* @since solr 1.3
*/
-public class SolrResourceLoader implements ResourceLoader
+public class SolrResourceLoader implements ResourceLoader,Closeable
{
public static final Logger log = LoggerFactory.getLogger(SolrResourceLoader.class);
@@ -152,9 +154,11 @@ public class SolrResourceLoader implemen
File base = FileUtils.resolvePath(new File(getInstanceDir()), baseDir);
if (base != null && base.exists() && base.isDirectory()) {
File[] files = base.listFiles(filter);
- if (!quiet && (files == null || files.length == 0)) {
- log.warn("No files added to classloader from lib: "
- + baseDir + " (resolved as: " + base.getAbsolutePath() + ").");
+ if (files == null || files.length == 0) {
+ if (!quiet) {
+ log.warn("No files added to classloader from lib: "
+ + baseDir + " (resolved as: " + base.getAbsolutePath() + ").");
+ }
} else {
this.classLoader = replaceClassLoader(classLoader, base, filter);
}
@@ -165,35 +169,10 @@ public class SolrResourceLoader implemen
}
}
}
-
- /**
- * Adds the specific file/dir specified to the ClassLoader used by this
- * ResourceLoader. This method <b>MUST</b>
- * only be called prior to using this ResourceLoader to get any resources, otherwise
- * it's behavior will be non-deterministic. You also have to {link #reloadLuceneSPI()}
- * before using this ResourceLoader.
- *
- * @param path A jar file (or directory of classes) to be added to the classpath,
- * will be resolved relative the instance dir.
- */
- void addToClassLoader(final String path) {
- final File file = FileUtils.resolvePath(new File(getInstanceDir()), path);
- if (file.canRead()) {
- this.classLoader = replaceClassLoader(classLoader, file.getParentFile(),
- new FileFilter() {
- @Override
- public boolean accept(File pathname) {
- return pathname.equals(file);
- }
- });
- } else {
- log.error("Can't find (or read) file to add to classloader: " + file);
- }
- }
/**
* Reloads all Lucene SPI implementations using the new classloader.
- * This method must be called after {@link #addToClassLoader(String)}
+ * This method must be called after {@link #addToClassLoader(String, FileFilter, boolean)}
* and {@link #addToClassLoader(String,FileFilter,boolean)} before using
* this ResourceLoader.
*/
@@ -229,7 +208,9 @@ public class SolrResourceLoader implemen
SolrException.log(log, "Can't add element to classloader: " + files[j], e);
}
}
- return URLClassLoader.newInstance(elements, oldLoader.getParent());
+ ClassLoader oldParent = oldLoader.getParent();
+ IOUtils.closeWhileHandlingException(oldLoader); // best effort
+ return URLClassLoader.newInstance(elements, oldParent);
}
// are we still here?
return oldLoader;
@@ -778,4 +759,9 @@ public class SolrResourceLoader implemen
}
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, builder.toString() );
}
+
+ @Override
+ public void close() throws IOException {
+ IOUtils.close(classLoader);
+ }
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ZkContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ZkContainer.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ZkContainer.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/core/ZkContainer.java Wed May 22 15:51:08 2013
@@ -43,11 +43,6 @@ import org.xml.sax.InputSource;
public class ZkContainer {
protected static Logger log = LoggerFactory.getLogger(ZkContainer.class);
- /** @deprecated will be remove in Solr 5.0 (SOLR-4622) */
- public static final String DEFAULT_HOST_CONTEXT = "solr";
- /** @deprecated will be remove in Solr 5.0 (SOLR-4622) */
- public static final String DEFAULT_HOST_PORT = "8983";
-
protected ZkController zkController;
private SolrZkServer zkServer;
private int zkClientTimeout;
@@ -119,21 +114,14 @@ public class ZkContainer {
if (zkRun == null && zookeeperHost == null)
return; // not in zk mode
-
- // BEGIN: SOLR-4622: deprecated hardcoded defaults for hostPort & hostContext
if (null == hostPort) {
- // throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- // "'hostPort' must be configured to run SolrCloud");
- log.warn("Solr 'hostPort' has not be explicitly configured, using hardcoded default of " + DEFAULT_HOST_PORT + ". This default has been deprecated and will be removed in future versions of Solr, please configure this value explicitly");
- hostPort = DEFAULT_HOST_PORT;
+ throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
+ "'hostPort' must be configured to run SolrCloud");
}
if (null == hostContext) {
- // throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- // "'hostContext' must be configured to run SolrCloud");
- log.warn("Solr 'hostContext' has not be explicitly configured, using hardcoded default of " + DEFAULT_HOST_CONTEXT + ". This default has been deprecated and will be removed in future versions of Solr, please configure this value explicitly");
- hostContext = DEFAULT_HOST_CONTEXT;
+ throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
+ "'hostContext' must be configured to run SolrCloud");
}
- // END: SOLR-4622
// zookeeper in quorum mode currently causes a failure when trying to
// register log4j mbeans. See SOLR-2369
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/SnapPuller.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/SnapPuller.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/SnapPuller.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/SnapPuller.java Wed May 22 15:51:08 2013
@@ -798,21 +798,53 @@ public class SnapPuller {
}
/**
+ * Make file list
+ */
+ private List<File> makeTmpConfDirFileList(File dir, List<File> fileList) {
+ File[] files = dir.listFiles();
+ for (File file : files) {
+ if (file.isFile()) {
+ fileList.add(file);
+ } else if (file.isDirectory()) {
+ fileList = makeTmpConfDirFileList(file, fileList);
+ }
+ }
+ return fileList;
+ }
+
+ /**
* The conf files are copied to the tmp dir to the conf dir. A backup of the old file is maintained
*/
private void copyTmpConfFiles2Conf(File tmpconfDir) {
+ boolean status = false;
File confDir = new File(solrCore.getResourceLoader().getConfigDir());
- for (File file : tmpconfDir.listFiles()) {
- File oldFile = new File(confDir, file.getName());
+ for (File file : makeTmpConfDirFileList(tmpconfDir, new ArrayList<File>())) {
+ File oldFile = new File(confDir, file.getPath().substring(tmpconfDir.getPath().length(), file.getPath().length()));
+ if (!oldFile.getParentFile().exists()) {
+ status = oldFile.getParentFile().mkdirs();
+ if (status) {
+ } else {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+ "Unable to mkdirs: " + oldFile.getParentFile());
+ }
+ }
if (oldFile.exists()) {
- File backupFile = new File(confDir, oldFile.getName() + "." + getDateAsStr(new Date(oldFile.lastModified())));
- boolean status = oldFile.renameTo(backupFile);
+ File backupFile = new File(oldFile.getPath() + "." + getDateAsStr(new Date(oldFile.lastModified())));
+ if (!backupFile.getParentFile().exists()) {
+ status = backupFile.getParentFile().mkdirs();
+ if (status) {
+ } else {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+ "Unable to mkdirs: " + backupFile.getParentFile());
+ }
+ }
+ status = oldFile.renameTo(backupFile);
if (!status) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Unable to rename: " + oldFile + " to: " + backupFile);
}
}
- boolean status = file.renameTo(oldFile);
+ status = file.renameTo(oldFile);
if (status) {
} else {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java Wed May 22 15:51:08 2013
@@ -252,6 +252,7 @@ public class CoreAdminHandler extends Re
List<String> paths = null;
int partitions = pathsArr != null ? pathsArr.length : newCoreNames.length;
+ DocRouter router = null;
if (coreContainer.isZooKeeperAware()) {
ClusterState clusterState = coreContainer.getZkController().getClusterState();
String collectionName = req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName();
@@ -259,8 +260,8 @@ public class CoreAdminHandler extends Re
String sliceName = req.getCore().getCoreDescriptor().getCloudDescriptor().getShardId();
Slice slice = clusterState.getSlice(collectionName, sliceName);
DocRouter.Range currentRange = slice.getRange();
- DocRouter hp = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT;
- ranges = currentRange != null ? hp.partitionRange(partitions, currentRange) : null;
+ router = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT;
+ ranges = currentRange != null ? router.partitionRange(partitions, currentRange) : null;
}
if (pathsArr == null) {
@@ -278,7 +279,7 @@ public class CoreAdminHandler extends Re
}
- SplitIndexCommand cmd = new SplitIndexCommand(req, paths, newCores, ranges);
+ SplitIndexCommand cmd = new SplitIndexCommand(req, paths, newCores, ranges, router);
core.getUpdateHandler().split(cmd);
// After the split has completed, someone (here?) should start the process of replaying the buffered updates.
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java Wed May 22 15:51:08 2013
@@ -191,9 +191,7 @@ public class ShowFileRequestHandler exte
ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
params.set(CommonParams.WT, "raw");
req.setParams(params);
-
- ContentStreamBase content = new ContentStreamBase.StringStream(
- new String(zkClient.getData(adminFile, null, null, true), "UTF-8"));
+ ContentStreamBase content = new ContentStreamBase.ByteArrayStream(zkClient.getData(adminFile, null, null, true), adminFile);
content.setContentType(req.getParams().get(USE_CONTENT_TYPE));
rsp.add(RawResponseWriter.CONTENT, content);
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java Wed May 22 15:51:08 2013
@@ -150,17 +150,23 @@ public class HttpShardHandlerFactory ext
clientParams.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, connectionTimeout);
clientParams.set(HttpClientUtil.PROP_USE_RETRY, false);
this.defaultClient = HttpClientUtil.createClient(clientParams);
+ this.loadbalancer = createLoadbalancer(defaultClient);
+ }
+
+ protected ThreadPoolExecutor getThreadPoolExecutor(){
+ return this.commExecutor;
+ }
+ protected LBHttpSolrServer createLoadbalancer(HttpClient httpClient){
try {
- loadbalancer = new LBHttpSolrServer(defaultClient);
+ return new LBHttpSolrServer(httpClient);
} catch (MalformedURLException e) {
// should be impossible since we're not passing any URLs here
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
}
-
}
- private <T> T getParameter(NamedList initArgs, String configKey, T defaultValue) {
+ protected <T> T getParameter(NamedList initArgs, String configKey, T defaultValue) {
T toReturn = defaultValue;
if (initArgs != null) {
T temp = (T) initArgs.get(configKey);
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java Wed May 22 15:51:08 2013
@@ -554,13 +554,13 @@ public class RealTimeGetComponent extend
List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);
- // TODO: get this from cache instead of rebuilding?
- UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
List<Object> updates = new ArrayList<Object>(versions.size());
long minVersion = Long.MAX_VALUE;
-
+
+ // TODO: get this from cache instead of rebuilding?
+ UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
try {
for (String versionStr : versions) {
long version = Long.parseLong(versionStr);
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java Wed May 22 15:51:08 2013
@@ -214,10 +214,20 @@ public class SpellCheckComponent extends
int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0);
int maxCollationEvaluations = params.getInt(SPELLCHECK_MAX_COLLATION_EVALUATIONS, 10000);
boolean collationExtendedResults = params.getBool(SPELLCHECK_COLLATE_EXTENDED_RESULTS, false);
+ int maxCollationCollectDocs = params.getInt(SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, 0);
+ // If not reporting hits counts, don't bother collecting more than 1 document per try.
+ if (!collationExtendedResults) {
+ maxCollationCollectDocs = 1;
+ }
boolean shard = params.getBool(ShardParams.IS_SHARD, false);
-
- SpellCheckCollator collator = new SpellCheckCollator();
- List<SpellCheckCollation> collations = collator.collate(spellingResult, q, rb, maxCollations, maxCollationTries, maxCollationEvaluations, suggestionsMayOverlap);
+ SpellCheckCollator collator = new SpellCheckCollator()
+ .setMaxCollations(maxCollations)
+ .setMaxCollationTries(maxCollationTries)
+ .setMaxCollationEvaluations(maxCollationEvaluations)
+ .setSuggestionsMayOverlap(suggestionsMayOverlap)
+ .setDocCollectionLimit(maxCollationCollectDocs)
+ ;
+ List<SpellCheckCollation> collations = collator.collate(spellingResult, q, rb);
//by sorting here we guarantee a non-distributed request returns all
//results in the same order as a distributed request would,
//even in cases when the internal rank is the same.
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/request/SimpleFacets.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/request/SimpleFacets.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/request/SimpleFacets.java Wed May 22 15:51:08 2013
@@ -149,6 +149,8 @@ public class SimpleFacets {
threads = -1;
if (localParams == null) {
+ params = orig;
+ required = new RequiredSolrParams(params);
return;
}
params = SolrParams.wrapDefaults(localParams, orig);
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/schema/IndexSchema.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/schema/IndexSchema.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/schema/IndexSchema.java Wed May 22 15:51:08 2013
@@ -432,11 +432,25 @@ public class IndexSchema {
final XPath xpath = schemaConf.getXPath();
String expression = stepsToPath(SCHEMA, AT + NAME);
Node nd = (Node) xpath.evaluate(expression, document, XPathConstants.NODE);
+ StringBuilder sb = new StringBuilder();
+ // Another case where the initialization from the test harness is different than the "real world"
+ sb.append("[");
+ if (loader.getCoreProperties() != null) {
+ sb.append(loader.getCoreProperties().getProperty(NAME));
+ } else {
+ sb.append("null");
+ }
+ sb.append("] ");
if (nd==null) {
- log.warn("schema has no name!");
+ sb.append("schema has no name!");
+ log.warn(sb.toString());
} else {
name = nd.getNodeValue();
- log.info("Schema " + NAME + "=" + name);
+ sb.append("Schema ");
+ sb.append(NAME);
+ sb.append("=");
+ sb.append(name);
+ log.info(sb.toString());
}
// /schema/@version
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/QParserPlugin.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/QParserPlugin.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/QParserPlugin.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/QParserPlugin.java Wed May 22 15:51:08 2013
@@ -43,6 +43,7 @@ public abstract class QParserPlugin impl
JoinQParserPlugin.NAME, JoinQParserPlugin.class,
SurroundQParserPlugin.NAME, SurroundQParserPlugin.class,
SwitchQParserPlugin.NAME, SwitchQParserPlugin.class,
+ MaxScoreQParserPlugin.NAME, MaxScoreQParserPlugin.class
};
/** return a {@link QParser} */
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java Wed May 22 15:51:08 2013
@@ -99,6 +99,7 @@ import org.apache.solr.request.UnInverte
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
+import org.apache.solr.spelling.QueryConverter;
import org.apache.solr.update.SolrIndexConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -1235,7 +1236,7 @@ public class SolrIndexSearcher extends I
public static final int GET_DOCSET = 0x40000000;
static final int NO_CHECK_FILTERCACHE = 0x20000000;
static final int NO_SET_QCACHE = 0x10000000;
-
+ public static final int TERMINATE_EARLY = 0x04;
public static final int GET_DOCLIST = 0x02; // get the documents actually returned in a response
public static final int GET_SCORES = 0x01;
@@ -1394,7 +1395,8 @@ public class SolrIndexSearcher extends I
float[] scores;
boolean needScores = (cmd.getFlags() & GET_SCORES) != 0;
-
+ boolean terminateEarly = (cmd.getFlags() & TERMINATE_EARLY) == TERMINATE_EARLY;
+
Query query = QueryUtils.makeQueryable(cmd.getQuery());
ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
@@ -1446,7 +1448,9 @@ public class SolrIndexSearcher extends I
}
};
}
-
+ if (terminateEarly) {
+ collector = new EarlyTerminatingCollector(collector, cmd.len);
+ }
if( timeAllowed > 0 ) {
collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed);
}
@@ -1481,6 +1485,9 @@ public class SolrIndexSearcher extends I
topCollector = TopFieldCollector.create(weightSort(cmd.getSort()), len, false, needScores, needScores, true);
}
Collector collector = topCollector;
+ if (terminateEarly) {
+ collector = new EarlyTerminatingCollector(collector, cmd.len);
+ }
if( timeAllowed > 0 ) {
collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed);
}
@@ -1529,6 +1536,7 @@ public class SolrIndexSearcher extends I
DocSet set;
boolean needScores = (cmd.getFlags() & GET_SCORES) != 0;
+ boolean terminateEarly = (cmd.getFlags() & TERMINATE_EARLY) == TERMINATE_EARLY;
int maxDoc = maxDoc();
int smallSetSize = maxDoc>>6;
@@ -1568,7 +1576,9 @@ public class SolrIndexSearcher extends I
}
});
}
-
+ if (terminateEarly) {
+ collector = new EarlyTerminatingCollector(collector, cmd.len);
+ }
if( timeAllowed > 0 ) {
collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed);
}
@@ -1604,7 +1614,9 @@ public class SolrIndexSearcher extends I
DocSetCollector setCollector = new DocSetDelegateCollector(maxDoc>>6, maxDoc, topCollector);
Collector collector = setCollector;
-
+ if (terminateEarly) {
+ collector = new EarlyTerminatingCollector(collector, cmd.len);
+ }
if( timeAllowed > 0 ) {
collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed );
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java Wed May 22 15:51:08 2013
@@ -22,7 +22,9 @@ import java.util.Iterator;
import java.util.List;
import org.apache.lucene.analysis.Token;
+import org.apache.lucene.index.IndexReader;
import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.DisMaxParams;
import org.apache.solr.common.params.GroupParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
@@ -33,15 +35,22 @@ import org.apache.solr.handler.component
import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.search.EarlyTerminatingCollectorException;
+import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SpellCheckCollator {
private static final Logger LOG = LoggerFactory.getLogger(SpellCheckCollator.class);
-
- public List<SpellCheckCollation> collate(SpellingResult result, String originalQuery, ResponseBuilder ultimateResponse,
- int maxCollations, int maxTries, int maxEvaluations, boolean suggestionsMayOverlap) {
- List<SpellCheckCollation> collations = new ArrayList<SpellCheckCollation>();
+ private int maxCollations = 1;
+ private int maxCollationTries = 0;
+ private int maxCollationEvaluations = 10000;
+ private boolean suggestionsMayOverlap = false;
+ private int docCollectionLimit = 0;
+
+ public List<SpellCheckCollation> collate(SpellingResult result,
+ String originalQuery, ResponseBuilder ultimateResponse) {
+ List<SpellCheckCollation> collations = new ArrayList<SpellCheckCollation>();
QueryComponent queryComponent = null;
if (ultimateResponse.components != null) {
@@ -54,6 +63,7 @@ public class SpellCheckCollator {
}
boolean verifyCandidateWithQuery = true;
+ int maxTries = maxCollationTries;
int maxNumberToIterate = maxTries;
if (maxTries < 1) {
maxTries = 1;
@@ -65,10 +75,17 @@ public class SpellCheckCollator {
maxTries = 1;
verifyCandidateWithQuery = false;
}
+ docCollectionLimit = docCollectionLimit > 0 ? docCollectionLimit : 0;
+ int maxDocId = -1;
+ if (verifyCandidateWithQuery && docCollectionLimit > 0) {
+ IndexReader reader = ultimateResponse.req.getSearcher().getIndexReader();
+ maxDocId = reader.maxDoc();
+ }
int tryNo = 0;
int collNo = 0;
- PossibilityIterator possibilityIter = new PossibilityIterator(result.getSuggestions(), maxNumberToIterate, maxEvaluations, suggestionsMayOverlap);
+ PossibilityIterator possibilityIter = new PossibilityIterator(result.getSuggestions(),
+ maxNumberToIterate, maxCollationEvaluations, suggestionsMayOverlap);
while (tryNo < maxTries && collNo < maxCollations && possibilityIter.hasNext()) {
PossibilityIterator.RankedSpellPossibility possibility = possibilityIter.next();
@@ -96,12 +113,25 @@ public class SpellCheckCollator {
}
params.set(CommonParams.Q, collationQueryStr);
params.remove(CommonParams.START);
+ params.set(CommonParams.ROWS, "" + docCollectionLimit);
+ // we don't want any stored fields
params.set(CommonParams.FL, "id");
- params.set(CommonParams.ROWS, "0");
+ // we'll sort by doc id to ensure no scoring is done.
+ params.set(CommonParams.SORT, "_docid_ asc");
+ // If a dismax query, don't add unnecessary clauses for scoring
+ params.remove(DisMaxParams.TIE);
+ params.remove(DisMaxParams.PF);
+ params.remove(DisMaxParams.PF2);
+ params.remove(DisMaxParams.PF3);
+ params.remove(DisMaxParams.BQ);
+ params.remove(DisMaxParams.BF);
+ // Collate testing does not support Grouping (see SOLR-2577)
params.remove(GroupParams.GROUP);
// creating a request here... make sure to close it!
- ResponseBuilder checkResponse = new ResponseBuilder(new LocalSolrQueryRequest(ultimateResponse.req.getCore(), params),new SolrQueryResponse(), Arrays.<SearchComponent>asList(queryComponent));
+ ResponseBuilder checkResponse = new ResponseBuilder(
+ new LocalSolrQueryRequest(ultimateResponse.req.getCore(), params),
+ new SolrQueryResponse(), Arrays.<SearchComponent> asList(queryComponent));
checkResponse.setQparser(ultimateResponse.getQparser());
checkResponse.setFilters(ultimateResponse.getFilters());
checkResponse.setQueryString(collationQueryStr);
@@ -109,8 +139,19 @@ public class SpellCheckCollator {
try {
queryComponent.prepare(checkResponse);
+ if (docCollectionLimit > 0) {
+ int f = checkResponse.getFieldFlags();
+ checkResponse.setFieldFlags(f |= SolrIndexSearcher.TERMINATE_EARLY);
+ }
queryComponent.process(checkResponse);
hits = (Integer) checkResponse.rsp.getToLog().get("hits");
+ } catch (EarlyTerminatingCollectorException etce) {
+ assert (docCollectionLimit > 0);
+ if (etce.getLastDocId() + 1 == maxDocId) {
+ hits = docCollectionLimit;
+ } else {
+ hits = maxDocId / ((etce.getLastDocId() + 1) / docCollectionLimit);
+ }
} catch (Exception e) {
LOG.warn("Exception trying to re-query to check if a spell check possibility would return any hits.", e);
} finally {
@@ -191,6 +232,27 @@ public class SpellCheckCollator {
offset += corr.length() - oneForReqOrProhib - (tok.endOffset() - tok.startOffset());
}
return collation.toString();
- }
-
+ }
+ public SpellCheckCollator setMaxCollations(int maxCollations) {
+ this.maxCollations = maxCollations;
+ return this;
+ }
+ public SpellCheckCollator setMaxCollationTries(int maxCollationTries) {
+ this.maxCollationTries = maxCollationTries;
+ return this;
+ }
+ public SpellCheckCollator setMaxCollationEvaluations(
+ int maxCollationEvaluations) {
+ this.maxCollationEvaluations = maxCollationEvaluations;
+ return this;
+ }
+ public SpellCheckCollator setSuggestionsMayOverlap(
+ boolean suggestionsMayOverlap) {
+ this.suggestionsMayOverlap = suggestionsMayOverlap;
+ return this;
+ }
+ public SpellCheckCollator setDocCollectionLimit(int docCollectionLimit) {
+ this.docCollectionLimit = docCollectionLimit;
+ return this;
+ }
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java Wed May 22 15:51:08 2013
@@ -32,9 +32,11 @@ import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.OpenBitSet;
import org.apache.solr.common.cloud.DocRouter;
+import org.apache.solr.common.cloud.HashBasedRouter;
import org.apache.solr.common.util.Hash;
import org.apache.solr.core.SolrCore;
import org.apache.solr.schema.SchemaField;
+import org.apache.solr.schema.StrField;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
@@ -53,6 +55,8 @@ public class SolrIndexSplitter {
DocRouter.Range[] rangesArr; // same as ranges list, but an array for extra speed in inner loops
List<String> paths;
List<SolrCore> cores;
+ DocRouter router;
+ HashBasedRouter hashRouter;
int numPieces;
int currPartition = 0;
@@ -62,6 +66,9 @@ public class SolrIndexSplitter {
ranges = cmd.ranges;
paths = cmd.paths;
cores = cmd.cores;
+ router = cmd.router;
+ hashRouter = router instanceof HashBasedRouter ? (HashBasedRouter)router : null;
+
if (ranges == null) {
numPieces = paths != null ? paths.size() : cores.size();
} else {
@@ -151,16 +158,24 @@ public class SolrIndexSplitter {
BytesRef term = null;
DocsEnum docsEnum = null;
+ CharsRef idRef = new CharsRef(100);
for (;;) {
term = termsEnum.next();
if (term == null) break;
// figure out the hash for the term
- // TODO: hook in custom hashes (or store hashes)
- // TODO: performance implications of using indexedToReadable?
- CharsRef ref = new CharsRef(term.length);
- ref = field.getType().indexedToReadable(term, ref);
- int hash = Hash.murmurhash3_x86_32(ref, ref.offset, ref.length, 0);
+
+ // FUTURE: if conversion to strings costs too much, we could
+ // specialize and use the hash function that can work over bytes.
+ idRef = field.getType().indexedToReadable(term, idRef);
+ String idString = idRef.toString();
+
+ int hash = 0;
+ if (hashRouter != null) {
+ hash = hashRouter.sliceHash(idString, null, null);
+ }
+ // int hash = Hash.murmurhash3_x86_32(ref, ref.offset, ref.length, 0);
+
docsEnum = termsEnum.docs(liveDocs, docsEnum, DocsEnum.FLAG_NONE);
for (;;) {
int doc = docsEnum.nextDoc();
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java Wed May 22 15:51:08 2013
@@ -34,13 +34,14 @@ public class SplitIndexCommand extends U
public List<String> paths;
public List<SolrCore> cores; // either paths or cores should be specified
public List<DocRouter.Range> ranges;
- // TODO: allow specification of custom hash function
+ public DocRouter router;
- public SplitIndexCommand(SolrQueryRequest req, List<String> paths, List<SolrCore> cores, List<DocRouter.Range> ranges) {
+ public SplitIndexCommand(SolrQueryRequest req, List<String> paths, List<SolrCore> cores, List<DocRouter.Range> ranges, DocRouter router) {
super(req);
this.paths = paths;
this.cores = cores;
this.ranges = ranges;
+ this.router = router;
}
@Override
@@ -54,6 +55,7 @@ public class SplitIndexCommand extends U
sb.append(",paths=" + paths);
sb.append(",cores=" + cores);
sb.append(",ranges=" + ranges);
+ sb.append(",router=" + router);
sb.append('}');
return sb.toString();
}
Modified: lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/UpdateLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/UpdateLog.java?rev=1485259&r1=1485258&r2=1485259&view=diff
==============================================================================
--- lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/UpdateLog.java (original)
+++ lucene/dev/branches/lucene4956/solr/core/src/java/org/apache/solr/update/UpdateLog.java Wed May 22 15:51:08 2013
@@ -989,7 +989,7 @@ public class UpdateLog implements Plugin
}
}
-
+ /** The RecentUpdates object returned must be closed after use */
public RecentUpdates getRecentUpdates() {
Deque<TransactionLog> logList;
synchronized (this) {
@@ -1009,9 +1009,21 @@ public class UpdateLog implements Plugin
// TODO: what if I hand out a list of updates, then do an update, then hand out another list (and
// one of the updates I originally handed out fell off the list). Over-request?
- RecentUpdates recentUpdates = new RecentUpdates();
- recentUpdates.logList = logList;
- recentUpdates.update();
+
+ boolean success = false;
+ RecentUpdates recentUpdates = null;
+ try {
+ recentUpdates = new RecentUpdates();
+ recentUpdates.logList = logList;
+ recentUpdates.update();
+ success = true;
+ } finally {
+ // defensive: if some unknown exception is thrown,
+ // make sure we close so that the tlogs are decref'd
+ if (!success && recentUpdates != null) {
+ recentUpdates.close();
+ }
+ }
return recentUpdates;
}
@@ -1132,14 +1144,15 @@ public class UpdateLog implements Plugin
class LogReplayer implements Runnable {
private Logger loglog = log; // set to something different?
- List<TransactionLog> translogs;
+ Deque<TransactionLog> translogs;
TransactionLog.LogReader tlogReader;
boolean activeLog;
boolean finishing = false; // state where we lock out other updates and finish those updates that snuck in before we locked
boolean debug = loglog.isDebugEnabled();
public LogReplayer(List<TransactionLog> translogs, boolean activeLog) {
- this.translogs = translogs;
+ this.translogs = new LinkedList<TransactionLog>();
+ this.translogs.addAll(translogs);
this.activeLog = activeLog;
}
@@ -1159,7 +1172,9 @@ public class UpdateLog implements Plugin
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); // setting request info will help logging
try {
- for (TransactionLog translog : translogs) {
+ for(;;) {
+ TransactionLog translog = translogs.pollFirst();
+ if (translog == null) break;
doReplay(translog);
}
} catch (SolrException e) {
@@ -1179,6 +1194,13 @@ public class UpdateLog implements Plugin
if (finishing) {
versionInfo.unblockUpdates();
}
+
+ // clean up in case we hit some unexpected exception and didn't get
+ // to more transaction logs
+ for (TransactionLog translog : translogs) {
+ log.error("ERROR: didn't get to recover from tlog " + translog);
+ translog.decref();
+ }
}
loglog.warn("Log replay finished. recoveryInfo=" + recoveryInfo);