Posted to commits@lucene.apache.org by ma...@apache.org on 2020/10/09 04:46:37 UTC

[lucene-solr] branch reference_impl_dev updated: @970 Take this out, perhaps causing rare bad version fail.

This is an automated email from the ASF dual-hosted git repository.

markrmiller pushed a commit to branch reference_impl_dev
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git


The following commit(s) were added to refs/heads/reference_impl_dev by this push:
     new 53fba78  @970 Take this out, perhaps causing rare bad version fail.
53fba78 is described below

commit 53fba785162525db4d1a18652b8035a360ba12ae
Author: markrmiller@gmail.com <ma...@gmail.com>
AuthorDate: Thu Oct 8 23:46:15 2020 -0500

    @970 Take this out, perhaps causing rare bad version fail.
---
 solr/core/src/java/org/apache/solr/cloud/Overseer.java | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index dcef0d6..6f5ed79 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -432,14 +432,14 @@ public class Overseer implements SolrCloseable {
       ClusterState state;
       LinkedHashMap<String,ClusterState.CollectionRef> collStates;
       ClusterState prevState = null;
-      if (itemsQueued.sum() == 1) {
-        log.info("First queue item for Overseer, pull cluster state ...");
-        zkClient.printLayout();
-        zkController.getZkStateReader().forciblyRefreshAllClusterStateSlow();
-        prevState = state = reader.getClusterState();
-      } else {
+//      if (itemsQueued.sum() == 1) {
+//        log.info("First queue item for Overseer, pull cluster state ...");
+//        zkClient.printLayout();
+//        zkController.getZkStateReader().forciblyRefreshAllClusterStateSlow();
+//        prevState = state = reader.getClusterState();
+//      } else {
         state = clusterState;
-      }
+//      }
       collStates = new LinkedHashMap<>(state.getCollectionStates());
       for (DocCollection docCollection : updatesToWrite.values()) {
         Map<String,Slice> slicesMap = docCollection.getSlicesMap();