Posted to commits@lucene.apache.org by dw...@apache.org on 2021/03/10 09:49:37 UTC

[lucene] 07/09: Added custom merge scheduler which prints MDC logging context in merge scheduler threads Adding more docs before commit so that old commit points can be cleared

This is an automated email from the ASF dual-hosted git repository.

dweiss pushed a commit to branch jira/solr-12730
in repository https://gitbox.apache.org/repos/asf/lucene.git

commit 2138b8392d3e989a3ab513a631dac0993b385fad
Author: Shalin Shekhar Mangar <sh...@apache.org>
AuthorDate: Thu Nov 1 19:41:51 2018 +0530

    Added custom merge scheduler which prints MDC logging context in merge scheduler threads
    Adding more docs before commit so that old commit points can be cleared
---
 .../org/apache/solr/update/SolrIndexConfig.java    | 50 +++++++++++-----------
 .../cloud/autoscaling/IndexSizeTriggerTest.java    | 13 +++++-
 2 files changed, 36 insertions(+), 27 deletions(-)
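
The heart of the change to SolrIndexConfig.java below is an anonymous ConcurrentMergeScheduler that folds the submitting thread's SLF4J MDC context into the name of each merge thread. As a self-contained sketch of the same pattern written as a named class (the class name is illustrative and not part of the commit; only the getMergeThread override mirrors what the diff does):

    import java.io.IOException;
    import java.util.Map;

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;
    import org.slf4j.MDC;

    // Sketch only: copy the submitting thread's MDC values into the merge
    // thread's name so merge-time log lines can be tied back to the
    // collection/core that scheduled the merge.
    public class MdcNamingMergeScheduler extends ConcurrentMergeScheduler {
      @Override
      protected synchronized MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge)
          throws IOException {
        MergeThread mergeThread = super.getMergeThread(writer, merge);
        Map<String, String> submitterContext = MDC.getCopyOfContextMap();
        if (submitterContext != null && !submitterContext.isEmpty()) {
          // Join the MDC values, escape '/' so the name stays readable, cap at 512 chars.
          String ctx = String.join(" ", submitterContext.values()).replace("/", "//");
          if (ctx.length() > 512) {
            ctx = ctx.substring(0, 512);
          }
          mergeThread.setName(mergeThread.getName() + "-processing-" + ctx);
        }
        return mergeThread;
      }
    }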

diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
index 38f16c2..0df2b16 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
@@ -286,7 +286,30 @@ public class SolrIndexConfig implements MapSerializable {
 
   private MergeScheduler buildMergeScheduler(IndexSchema schema) {
     String msClassName = mergeSchedulerInfo == null ? SolrIndexConfig.DEFAULT_MERGE_SCHEDULER_CLASSNAME : mergeSchedulerInfo.className;
-    MergeScheduler scheduler = new TestCMS(); // todo nocommit
+    // todo nocommit -- remove this scheduler instance with proper MDC logging support inside merge scheduler threads
+    MergeScheduler scheduler = new ConcurrentMergeScheduler() {
+      @Override
+      protected synchronized MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
+        MergeThread mergeThread = super.getMergeThread(writer, merge);
+        final Map<String, String> submitterContext = MDC.getCopyOfContextMap();
+        StringBuilder contextString = new StringBuilder();
+        if (submitterContext != null) {
+          Collection<String> values = submitterContext.values();
+
+          for (String value : values) {
+            contextString.append(value + " ");
+          }
+          if (contextString.length() > 1) {
+            contextString.setLength(contextString.length() - 1);
+          }
+        }
+
+        String ctxStr = contextString.toString().replace("/", "//");
+        final String submitterContextStr = ctxStr.length() <= 512 ? ctxStr : ctxStr.substring(0, 512);
+        mergeThread.setName(mergeThread.getName() + "-processing-" + submitterContextStr);
+        return mergeThread;
+      }
+    };
 
     if (mergeSchedulerInfo != null) {
       // LUCENE-5080: these two setters are removed, so we have to invoke setMaxMergesAndThreads
@@ -314,29 +337,4 @@ public class SolrIndexConfig implements MapSerializable {
 
     return scheduler;
   }
-
-  static class TestCMS extends ConcurrentMergeScheduler {
-    @Override
-    protected synchronized MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
-      MergeThread mergeThread = super.getMergeThread(writer, merge);
-      final Map<String, String> submitterContext = MDC.getCopyOfContextMap();
-      StringBuilder contextString = new StringBuilder();
-      if (submitterContext != null) {
-        Collection<String> values = submitterContext.values();
-
-        for (String value : values) {
-          contextString.append(value + " ");
-        }
-        if (contextString.length() > 1) {
-          contextString.setLength(contextString.length() - 1);
-        }
-      }
-
-      String ctxStr = contextString.toString().replace("/", "//");
-      final String submitterContextStr = ctxStr.length() <= 512 ? ctxStr : ctxStr.substring(0, 512);
-      mergeThread.setName(mergeThread.getName() + "-processing-" + submitterContextStr);
-      return mergeThread;
-    }
-  }
-
 }
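
For a rough feel of the resulting thread names, here is a small illustrative demo of the suffix construction. It assumes an SLF4J backend with MDC support (e.g. Logback or Log4j 2) on the classpath; the MDC keys and values are made up for the example and are not Solr's actual MDC entries.

    import java.util.Collections;
    import java.util.Map;

    import org.slf4j.MDC;

    // Illustrative demo of the name suffix the scheduler builds; the MDC
    // keys/values here are placeholders, not real Solr MDC context.
    public class MergeThreadNameDemo {
      public static void main(String[] args) {
        MDC.put("collection", "c:testCollection");
        MDC.put("shard", "s:shard1");

        Map<String, String> ctx = MDC.getCopyOfContextMap();
        if (ctx == null) {           // e.g. slf4j-nop has no MDC support
          ctx = Collections.emptyMap();
        }
        String suffix = String.join(" ", ctx.values()).replace("/", "//");
        if (suffix.length() > 512) {
          suffix = suffix.substring(0, 512);
        }
        // Prints something like:
        //   Lucene Merge Thread #0-processing-c:testCollection s:shard1
        // (value order follows the MDC map's iteration order)
        System.out.println("Lucene Merge Thread #0-processing-" + suffix);
      }
    }
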
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java
index 84833dc..76b1a55 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java
@@ -67,7 +67,7 @@ import static org.apache.solr.common.cloud.ZkStateReader.SOLR_AUTOSCALING_CONF_P
 /**
  *
  */
-@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.handler.admin.MetricsHandler=DEBUG;org.apache.solr.core.SolrDeletionPolicy=DEBUG;org.apache.solr.core.IndexDeletionPolicyWrapper=DEBUG;org.apache.solr.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUGorg.apache.solr.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud.O [...]
+//@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.handler.admin.MetricsHandler=DEBUG;org.apache.solr.core.SolrDeletionPolicy=DEBUG;org.apache.solr.core.IndexDeletionPolicyWrapper=DEBUG;org.apache.solr.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUGorg.apache.solr.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud [...]
 public class IndexSizeTriggerTest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -652,6 +652,17 @@ public class IndexSizeTriggerTest extends SolrCloudTestCase {
     // wait for the segments to merge to reduce the index size
     cloudManager.getTimeSource().sleep(50000);
 
+    // add some docs so that every shard gets an update
+    // we can reduce the number of docs here but this also works
+    for (int j = 0; j < 1; j++) {
+      UpdateRequest ureq = new UpdateRequest();
+      ureq.setParam("collection", collectionName);
+      for (int i = 0; i < 98; i++) {
+        ureq.add("id", "id-" + (i * 100) + "-" + j);
+      }
+      solrClient.request(ureq);
+    }
+
     log.info("-- requesting commit");
     solrClient.commit(collectionName, true, true);
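
The test tweak above indexes a batch of documents so that every shard receives an update before the hard commit, which lets the deletion policy release stale commit points. Outside the test harness, the same step might look roughly like this with plain SolrJ (the ZooKeeper address, collection name, and document count are placeholders, not values from the commit):

    import java.util.Collections;
    import java.util.Optional;

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.request.UpdateRequest;
    import org.apache.solr.common.SolrInputDocument;

    // Sketch only: index enough documents that every shard gets an update,
    // then hard commit so the deletion policy can drop old commit points.
    public class TouchShardsAndCommit {
      public static void main(String[] args) throws Exception {
        String zkHost = "localhost:2181";          // placeholder
        String collectionName = "testCollection";  // placeholder
        try (SolrClient solrClient = new CloudSolrClient.Builder(
            Collections.singletonList(zkHost), Optional.empty()).build()) {
          UpdateRequest ureq = new UpdateRequest();
          ureq.setParam("collection", collectionName);
          for (int i = 0; i < 98; i++) {
            SolrInputDocument doc = new SolrInputDocument();
            doc.addField("id", "id-" + (i * 100));
            ureq.add(doc);
          }
          solrClient.request(ureq);
          // waitFlush=true, waitSearcher=true, matching the test's commit call
          solrClient.commit(collectionName, true, true);
        }
      }
    }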