You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by ma...@apache.org on 2020/07/14 01:46:43 UTC
[lucene-solr] 01/02: #126 - Tweaking on long tail tests.
This is an automated email from the ASF dual-hosted git repository.
markrmiller pushed a commit to branch reference_impl
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git
commit e7557a7d27097a74b11a21c12c3e00513509f3cc
Author: markrmiller@gmail.com <ma...@gmail.com>
AuthorDate: Mon Jul 13 20:44:24 2020 -0500
#126 - Tweaking on long tail tests.
---
.../src/java/org/apache/solr/core/SolrCore.java | 5 +-
.../handler/TestSolrConfigHandlerConcurrent.java | 29 ++++--
.../component/DistributedFacetPivotLargeTest.java | 102 ++++++++++++++++++---
.../solrj/impl/ConcurrentUpdateSolrClient.java | 2 +-
.../org/apache/solr/SolrIgnoredThreadsFilter.java | 4 +
.../src/java/org/apache/solr/SolrTestCaseJ4.java | 45 ++++++++-
.../solr/cloud/AbstractFullDistribZkTestBase.java | 2 +-
7 files changed, 158 insertions(+), 31 deletions(-)
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 5213b55..e3cc6c1 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -403,8 +403,8 @@ public final class SolrCore implements SolrInfoBean, Closeable {
}
lastNewIndexDir = result;
return result;
- } catch (IOException e) {
- SolrException.log(log, "getNewIndexDir", e);
+ } catch (Exception e) {
+ ParWork.propegateInterrupt(e);
// See SOLR-11687. It is inadvisable to assume we can do the right thing for any but a small
// number of exceptions that were caught and swallowed in getIndexProperty.
throw new SolrException(ErrorCode.SERVER_ERROR, "Error in getNewIndexDir, exception: ", e);
@@ -414,6 +414,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
getDirectoryFactory().release(dir);
} catch (Exception e) {
ParWork.propegateInterrupt( "Error releasing directory", e);
+ throw new SolrException(ErrorCode.SERVER_ERROR, "Error releasing directory: ", e);
}
}
}
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
index 24d0fff..d5a34bd 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
@@ -24,6 +24,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import org.apache.http.HttpEntity;
@@ -59,7 +60,7 @@ public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBa
Map caches = (Map) editable_prop_map.get("query");
setupRestTestHarnesses();
- List<Thread> threads = new ArrayList<>(caches.size());
+ List<Callable<Object>> threads = new ArrayList<>(caches.size());
final List<List> collectErrors = new ArrayList<>();
for (Object o : caches.entrySet()) {
@@ -68,25 +69,33 @@ public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBa
List<String> errs = new ArrayList<>();
collectErrors.add(errs);
Map value = (Map) e.getValue();
- Thread t = new Thread(() -> {
- try {
- invokeBulkCall((String)e.getKey() , errs, value);
- } catch (Exception e1) {
- e1.printStackTrace();
+ Callable t = new Callable() {
+ @Override
+ public Object call() {
+ try {
+ invokeBulkCall((String) e.getKey(), errs, value);
+ } catch (Exception e1) {
+ e1.printStackTrace();
+ }
+ return null;
}
- });
+ };
threads.add(t);
- t.start();
+ if (!TEST_NIGHTLY) {
+ if (threads.size() > 10) {
+ break;
+ }
+ }
}
}
- for (Thread thread : threads) thread.join();
+ testExecutor.invokeAll(threads);
boolean success = true;
for (List e : collectErrors) {
- if(!e.isEmpty()){
+ if (!e.isEmpty()) {
success = false;
log.error("{}", e);
}
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
index 1c2d773..6a6dcb8 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
@@ -22,6 +22,7 @@ import java.util.Date;
import java.util.List;
import junit.framework.AssertionFailedError;
+import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.BaseDistributedSearchTestCase;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
@@ -29,11 +30,13 @@ import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.PivotField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
+import org.apache.solr.common.ParWork;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.SolrParams;
import org.junit.Test;
+@LuceneTestCase.Nightly
public class DistributedFacetPivotLargeTest extends BaseDistributedSearchTestCase {
public static final String SPECIAL = "";
@@ -146,7 +149,7 @@ public class DistributedFacetPivotLargeTest extends BaseDistributedSearchTestCas
rsp = query( p );
pivots = rsp.getFacetPivot().get("place_s,company_t");
assertEquals(20, pivots.size()); // limit
- for (int i = 0; i < 10; i++) {
+ for (int i = 0; i < (TEST_NIGHTLY ? 10 : 5); i++) {
PivotField place = pivots.get(i);
assertTrue(place.toString(), place.getValue().toString().endsWith("placeholder"));
assertEquals(3, place.getPivot().size());
@@ -157,7 +160,7 @@ public class DistributedFacetPivotLargeTest extends BaseDistributedSearchTestCas
assertPivot("place_s", "cardiff", 257, pivots.get(10));
assertPivot("place_s", "krakaw", 1, pivots.get(11));
assertPivot("place_s", "medical staffing network holdings, inc.", 51, pivots.get(12));
- for (int i = 13; i < 20; i++) {
+ for (int i = 13; i < (TEST_NIGHTLY ? 20 : 15); i++) {
PivotField place = pivots.get(i);
assertTrue(place.toString(), place.getValue().toString().startsWith("placeholder"));
assertEquals(1, place.getPivot().size());
@@ -1009,17 +1012,92 @@ public class DistributedFacetPivotLargeTest extends BaseDistributedSearchTestCas
}
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft","pay_i",4367,"hiredate_dt", "2012-11-01T12:30:00Z");
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft bbc","pay_i",8742,"hiredate_dt", "2012-11-01T12:30:00Z");
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft polecat","pay_i",5824,"hiredate_dt", "2012-11-01T12:30:00Z");
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft ","pay_i",6539,"hiredate_dt", "2012-11-01T12:30:00Z");
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "medical staffing network holdings, inc.", "company_t", "microsoft ","pay_i",6539,"hiredate_dt", "2012-11-01T12:30:00Z", "special_s", "xxx");
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "polecat","pay_i",4352,"hiredate_dt", "2012-01-01T12:30:00Z", "special_s", "xxx");
- addPivotDoc(oneShard, "id", getDocNum(), "place_s", "krakaw", "company_t", "polecat","pay_i",4352,"hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
-
- addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft","pay_i",12,"hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
- addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft","pay_i",543,"hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
+ try (ParWork adder = new ParWork(this)) {
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft", "pay_i", 4367, "hiredate_dt", "2012-11-01T12:30:00Z");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft bbc", "pay_i", 8742, "hiredate_dt", "2012-11-01T12:30:00Z");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft polecat", "pay_i", 5824, "hiredate_dt", "2012-11-01T12:30:00Z");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft ", "pay_i", 6539, "hiredate_dt", "2012-11-01T12:30:00Z");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "medical staffing network holdings, inc.", "company_t", "microsoft ", "pay_i", 6539, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", "xxx");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "polecat", "pay_i", 4352, "hiredate_dt", "2012-01-01T12:30:00Z", "special_s", "xxx");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(oneShard, "id", getDocNum(), "place_s", "krakaw", "company_t", "polecat", "pay_i", 4352, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft", "pay_i", 12, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.collect(() -> {
+ try {
+ addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft", "pay_i", 543, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ } catch (SolrServerException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ adder.addCollect("addDocs");
+ }
// two really trivial documents, unrelated to the rest of the tests,
// for the purpose of demoing the problem with mincount=0
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
index c95cbd7..47e0251 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
@@ -84,7 +84,7 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
volatile CountDownLatch lock = null; // used to block everything
final int threadCount;
boolean shutdownExecutor = false;
- int pollQueueTime = 250;
+ int pollQueueTime = 5;
int stallTime;
private final boolean streamDeletes;
private boolean internalHttpClient;
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java b/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java
index 3f6dcd5..6179c8b 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java
@@ -83,6 +83,10 @@ public class SolrIgnoredThreadsFilter implements ThreadFilter {
if (threadName.contains("-SendThread")) {
return true;
}
+ if (threadName.startsWith("ConnnectionExpirer")) {
+ return true;
+ }
+
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 554d184..272d02b 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -58,6 +58,10 @@ import java.util.Map.Entry;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.RejectedExecutionHandler;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutorService;
@@ -89,6 +93,7 @@ import org.apache.solr.client.solrj.response.SolrResponseBase;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.cloud.IpTables;
import org.apache.solr.cloud.MiniSolrCloudCluster;
+import org.apache.solr.common.AlreadyClosedException;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
@@ -232,11 +237,41 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
initClassLogLevels();
resetExceptionIgnores();
- testExecutor = new ExecutorUtil.MDCAwareThreadPoolExecutor(0, Integer.MAX_VALUE,
- 5L, TimeUnit.SECONDS,
- new SynchronousQueue<>(),
- new SolrNamedThreadFactory("testExecutor"),
- true);
+ testExecutor = new ThreadPoolExecutor(0, Math.max(1, Runtime.getRuntime().availableProcessors()),
+ 3000, TimeUnit.SECONDS,
+ new ArrayBlockingQueue<>(30), // size?
+ new ThreadFactory() {
+ AtomicInteger threadNumber = new AtomicInteger(1);
+ ThreadGroup group;
+
+ {
+ SecurityManager s = System.getSecurityManager();
+ group = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup();
+ }
+
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread t = new Thread(group, r, "testExecutor" + threadNumber.getAndIncrement(), 0);
+ t.setDaemon(false);
+ return t;
+ }
+ }, new RejectedExecutionHandler() {
+
+ @Override
+ public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
+ log.warn("Task was rejected, running in caller thread");
+ if (executor.isShutdown() || executor.isTerminated() || executor.isTerminating()) {
+ throw new AlreadyClosedException();
+ }
+// try {
+// Thread.sleep(1000);
+// } catch (InterruptedException e) {
+// Thread.currentThread().interrupt();
+// }
+// executor.execute(r);
+ r.run();
+ }
+ });
// set solr.install.dir needed by some test configs outside of the test sandbox (!)
System.setProperty("solr.install.dir", ExternalPaths.SOURCE_HOME);
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index bf550f8..b438df3 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -2307,7 +2307,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
return state;
}
try {
- Thread.sleep(1000);
+ Thread.sleep(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Interrupted whie waiting for request completion. Last state seen: " + state, e);