You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by ab...@apache.org on 2017/12/11 18:52:20 UTC

[01/21] lucene-solr:jira/solr-11285-sim: LUCENE-8082: Fix NPE in TopFieldCollectors that don't track total hit count

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-11285-sim e4fed2145 -> 9e1c2490f


LUCENE-8082: Fix NPE in TopFieldCollectors that don't track total hit count


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/68d16c2a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/68d16c2a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/68d16c2a

Branch: refs/heads/jira/solr-11285-sim
Commit: 68d16c2a65b4acd0ce1ca543ae53a82e2516f1e5
Parents: 4fc5a87
Author: Jim Ferenczi <ji...@apache.org>
Authored: Thu Dec 7 14:08:46 2017 +0100
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Thu Dec 7 14:08:46 2017 +0100

----------------------------------------------------------------------
 .../apache/lucene/search/TopFieldCollector.java | 12 +++++++---
 .../lucene/search/TestTopFieldCollector.java    | 25 ++++++++++++++++++++
 2 files changed, 34 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68d16c2a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
index c3597e9..3d85277 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
@@ -121,9 +121,11 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 
       final LeafFieldComparator[] comparators = queue.getComparators(context);
       final int[] reverseMul = queue.getReverseMul();
+      final Sort indexSort = context.reader().getMetaData().getSort();
       final boolean canEarlyTerminate = trackTotalHits == false &&
           trackMaxScore == false &&
-          canEarlyTerminate(sort, context.reader().getMetaData().getSort());
+          indexSort != null &&
+          canEarlyTerminate(sort, indexSort);
       final int initialTotalHits = totalHits;
 
       return new MultiComparatorLeafCollector(comparators, reverseMul, mayNeedScoresTwice) {
@@ -212,7 +214,9 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
       this.trackTotalHits = trackTotalHits;
 
       // Must set maxScore to NEG_INF, or otherwise Math.max always returns NaN.
-      maxScore = Float.NEGATIVE_INFINITY;
+      if (trackMaxScore) {
+        maxScore = Float.NEGATIVE_INFINITY;
+      }
 
       FieldComparator<?>[] comparators = queue.comparators;
       // Tell all comparators their top value:
@@ -227,9 +231,11 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
     public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
       docBase = context.docBase;
       final int afterDoc = after.doc - docBase;
+      final Sort indexSort = context.reader().getMetaData().getSort();
       final boolean canEarlyTerminate = trackTotalHits == false &&
           trackMaxScore == false &&
-          canEarlyTerminate(sort, context.reader().getMetaData().getSort());
+          indexSort != null &&
+          canEarlyTerminate(sort, indexSort);
       final int initialTotalHits = totalHits;
       return new MultiComparatorLeafCollector(queue.getComparators(context), queue.getReverseMul(), mayNeedScoresTwice) {
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68d16c2a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
index 0b7dc5b..d8363f7 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
@@ -102,6 +102,31 @@ public class TestTopFieldCollector extends LuceneTestCase {
       assertTrue(Float.isNaN(td.getMaxScore()));
     }
   }
+
+  public void testSortWithoutTotalHitTracking() throws Exception {
+    Sort sort = new Sort(SortField.FIELD_DOC);
+    for(int i = 0; i < 2; i++) {
+      Query q = new MatchAllDocsQuery();
+      // check that setting trackTotalHits to false does not throw an NPE because
+      // the index is not sorted
+      TopDocsCollector<Entry> tdc;
+      if (i % 2 == 0) {
+        tdc =  TopFieldCollector.create(sort, 10, true, false, false, false);
+      } else {
+        FieldDoc fieldDoc = new FieldDoc(1, Float.NaN, new Object[] { 1 });
+        tdc = TopFieldCollector.create(sort, 10, fieldDoc, true, false, false, false);
+      }
+
+      is.search(q, tdc);
+
+      TopDocs td = tdc.topDocs();
+      ScoreDoc[] sd = td.scoreDocs;
+      for(int j = 0; j < sd.length; j++) {
+        assertTrue(Float.isNaN(sd[j].score));
+      }
+      assertTrue(Float.isNaN(td.getMaxScore()));
+    }
+  }
   
   public void testSortWithScoreNoMaxScoreTracking() throws Exception {
     


[18/21] lucene-solr:jira/solr-11285-sim: LUCENE-8090: Prevent stale threadstate reads in DocumentsWriterFlushControl

Posted by ab...@apache.org.
LUCENE-8090: Prevent stale threadstate reads in DocumentsWriterFlushControl


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9ad84fea
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9ad84fea
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9ad84fea

Branch: refs/heads/jira/solr-11285-sim
Commit: 9ad84fea80a459be4e85b6ff6ef0a1976bcffe38
Parents: 952f4c4
Author: Simon Willnauer <si...@apache.org>
Authored: Mon Dec 11 14:45:26 2017 +0100
Committer: Simon Willnauer <si...@apache.org>
Committed: Mon Dec 11 14:46:02 2017 +0100

----------------------------------------------------------------------
 .../java/org/apache/lucene/index/DocumentsWriterFlushControl.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9ad84fea/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
index 761db0e..bf55991 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
@@ -716,7 +716,7 @@ final class DocumentsWriterFlushControl implements Accountable {
     return infoStream;
   }
 
-  ThreadState findLargestNonPendingWriter() {
+  synchronized ThreadState findLargestNonPendingWriter() {
     ThreadState maxRamUsingThreadState = null;
     long maxRamSoFar = 0;
     Iterator<ThreadState> activePerThreadsIterator = allActiveThreadStates();


[20/21] lucene-solr:jira/solr-11285-sim: Merge branch 'master' into jira/solr-11285-sim

Posted by ab...@apache.org.
Merge branch 'master' into jira/solr-11285-sim


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/403812cf
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/403812cf
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/403812cf

Branch: refs/heads/jira/solr-11285-sim
Commit: 403812cf505f3031b877a26a354209cde7ce4c62
Parents: 8ec50f9 9ad84fe
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Dec 11 14:50:22 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Dec 11 14:50:22 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  26 +++--
 .../apache/lucene/index/DocumentsWriter.java    |   5 +-
 .../index/DocumentsWriterFlushControl.java      |   2 +-
 .../apache/lucene/index/IndexWriterConfig.java  |   5 +
 .../lucene/index/LiveIndexWriterConfig.java     |  27 +++++
 .../apache/lucene/search/TopFieldCollector.java |  12 ++-
 .../apache/lucene/index/TestIndexWriter.java    | 102 +++++++++++++++++++
 .../lucene/index/TestIndexWriterConfig.java     |   2 +
 .../lucene/search/TestTopFieldCollector.java    |  25 +++++
 .../TestTopFieldCollectorEarlyTermination.java  |   5 +-
 .../lucene/spatial3d/geom/GeoExactCircle.java   |  15 +--
 .../lucene/spatial3d/geom/PlanetModel.java      |   7 ++
 .../spatial3d/geom/GeoExactCircleTest.java      |  34 +++++--
 .../geom/RandomGeo3dShapeGenerator.java         |   2 +-
 .../org/apache/lucene/util/LuceneTestCase.java  |   3 +
 solr/CHANGES.txt                                |   6 +-
 .../org/apache/solr/cloud/CreateAliasCmd.java   |  36 +++++--
 .../apache/solr/search/ExportQParserPlugin.java |   3 +-
 .../apache/solr/search/HashQParserPlugin.java   |   3 +-
 .../apache/solr/cloud/AliasIntegrationTest.java |  10 +-
 solr/solr-ref-guide/src/collections-api.adoc    |  11 ++
 .../src/solrcloud-autoscaling-api.adoc          |  51 ++++++++++
 22 files changed, 339 insertions(+), 53 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/403812cf/solr/CHANGES.txt
----------------------------------------------------------------------


[10/21] lucene-solr:jira/solr-11285-sim: SOLR-9743: documentation

Posted by ab...@apache.org.
SOLR-9743: documentation


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/25f24e09
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/25f24e09
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/25f24e09

Branch: refs/heads/jira/solr-11285-sim
Commit: 25f24e094e8c7d47356ed15ab5957e3fb7e2bec8
Parents: b327394
Author: Noble Paul <no...@apache.org>
Authored: Fri Dec 8 19:30:57 2017 +1100
Committer: Noble Paul <no...@apache.org>
Committed: Fri Dec 8 19:55:27 2017 +1100

----------------------------------------------------------------------
 solr/solr-ref-guide/src/collections-api.adoc | 11 +++++++++++
 1 file changed, 11 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/25f24e09/solr/solr-ref-guide/src/collections-api.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/collections-api.adoc b/solr/solr-ref-guide/src/collections-api.adoc
index 4879175..784e2cf 100644
--- a/solr/solr-ref-guide/src/collections-api.adoc
+++ b/solr/solr-ref-guide/src/collections-api.adoc
@@ -1912,6 +1912,17 @@ The name of the destination node. This parameter is required.
 `async`::
 Request ID to track this action which will be <<Asynchronous Calls,processed asynchronously>>.
 
+[[utilizenode]]
+== UTILIZENODE: Utilize a new node
+
+This command can be used to move some replicas from the existing nodes to a new node or lightly loaded node and reduce the load on them. This uses your autoscaling policies and preferences to identify which replica needs to be moved. It tries to fix any policy violations first and then it tries to move some load off of the most loaded nodes according to the preferences.
+
+`/admin/collections?action=UTILIZENODE&node=nodeName`
+=== UTILIZENODE Parameters
+
+`node`:: The name of the node that needs to be utilized. This parameter is required
+
+
 == Asynchronous Calls
 
 Since some collection API calls can be long running tasks (such as SPLITSHARD), you can optionally have the calls run asynchronously. Specifying `async=<request-id>` enables you to make an asynchronous call, the status of which can be requested using the <<requeststatus,REQUESTSTATUS>> call at any time.


[21/21] lucene-solr:jira/solr-11285-sim: SOLR-11403: Keep track of /live_nodes entries and ephemeral nodeAdded / nodeLost nodes. Port over NodeAdded and NodeLost trigger tests.

Posted by ab...@apache.org.
SOLR-11403: Keep track of /live_nodes entries and ephemeral nodeAdded / nodeLost nodes.
Port over NodeAdded and NodeLost trigger tests.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9e1c2490
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9e1c2490
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9e1c2490

Branch: refs/heads/jira/solr-11285-sim
Commit: 9e1c2490f33b3d215fd90d2ee1c6262380180ac1
Parents: 403812c
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Dec 11 19:51:28 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Dec 11 19:51:28 2017 +0100

----------------------------------------------------------------------
 .../cloud/autoscaling/ExecutePlanAction.java    |   8 +-
 .../cloud/autoscaling/sim/LiveNodesSet.java     |  99 ++++++
 .../cloud/autoscaling/sim/SimCloudManager.java  |  98 +++---
 .../sim/SimClusterStateProvider.java            |  70 +++-
 .../autoscaling/sim/SimDistribStateManager.java |  17 +
 .../autoscaling/sim/SimNodeStateProvider.java   |   8 +-
 .../sim/TestClusterStateProvider.java           |  25 +-
 .../autoscaling/sim/TestNodeAddedTrigger.java   | 306 +++++++++++++++++
 .../autoscaling/sim/TestNodeLostTrigger.java    | 334 +++++++++++++++++++
 .../autoscaling/sim/TestTriggerIntegration.java | 146 ++++----
 10 files changed, 949 insertions(+), 162 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
index 841856e..1e1b0ac 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
@@ -108,15 +108,13 @@ public class ExecutePlanAction extends TriggerActionBase {
         } catch (IOException e) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Unexpected exception executing operation: " + operation.getParams(), e);
-//        } catch (InterruptedException e) {
-//          Thread.currentThread().interrupt();
-//          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ExecutePlanAction was interrupted", e);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ExecutePlanAction was interrupted", e);
         } catch (Exception e) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Unexpected exception executing operation: " + operation.getParams(), e);
         }
-
-//        counter++;
       }
     } catch (Exception e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/LiveNodesSet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/LiveNodesSet.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/LiveNodesSet.java
new file mode 100644
index 0000000..45cd66b
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/LiveNodesSet.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud.autoscaling.sim;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.solr.common.cloud.LiveNodesListener;
+
+/**
+ * This class represents a set of live nodes and allows adding listeners to track their state.
+ */
+public class LiveNodesSet {
+
+  private final Set<String> set = ConcurrentHashMap.newKeySet();
+  private final Set<LiveNodesListener> listeners = ConcurrentHashMap.newKeySet();
+
+  public Set<String> get() {
+    return Collections.unmodifiableSet(set);
+  }
+
+  public void registerLiveNodesListener(LiveNodesListener listener) {
+    listeners.add(listener);
+  }
+
+  public void removeLiveNodesListener(LiveNodesListener listener) {
+    listeners.remove(listener);
+  }
+
+  private void fireListeners(SortedSet<String> oldNodes, SortedSet<String> newNodes) {
+    for (LiveNodesListener listener : listeners) {
+      listener.onChange(oldNodes, newNodes);
+    }
+  }
+
+  public boolean isEmpty() {
+    return set.isEmpty();
+  }
+
+  public boolean contains(String id) {
+    return set.contains(id);
+  }
+
+  public synchronized boolean add(String id) {
+    if (set.contains(id)) {
+      return false;
+    }
+    TreeSet<String> oldNodes = new TreeSet<>(set);
+    set.add(id);
+    TreeSet<String> newNodes = new TreeSet<>(set);
+    fireListeners(oldNodes, newNodes);
+    return true;
+  }
+
+  public synchronized boolean addAll(Collection<String> nodes) {
+    TreeSet<String> oldNodes = new TreeSet<>(set);
+    boolean changed = set.addAll(nodes);
+    TreeSet<String> newNodes = new TreeSet<>(set);
+    if (changed) {
+      fireListeners(oldNodes, newNodes);
+    }
+    return changed;
+  }
+
+  public synchronized boolean remove(String id) {
+    if (!set.contains(id)) {
+      return false;
+    }
+    TreeSet<String> oldNodes = new TreeSet<>(set);
+    set.remove(id);
+    TreeSet<String> newNodes = new TreeSet<>(set);
+    fireListeners(oldNodes, newNodes);
+    return true;
+  }
+
+  public synchronized void clear() {
+    TreeSet<String> oldNodes = new TreeSet<>(set);
+    set.clear();
+    fireListeners(oldNodes, Collections.emptySortedSet());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index 9c8cc29..92840c8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -26,7 +26,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
@@ -91,11 +90,10 @@ public class SimCloudManager implements SolrCloudManager {
   private final SimClusterStateProvider clusterStateProvider;
   private final SimNodeStateProvider nodeStateProvider;
   private final AutoScalingHandler autoScalingHandler;
-  private final Set<String> liveNodes = ConcurrentHashMap.newKeySet();
+  private final LiveNodesSet liveNodesSet = new LiveNodesSet();
   private final DistributedQueueFactory queueFactory;
   private final ObjectCache objectCache = new ObjectCache();
   private TimeSource timeSource;
-  private SolrClient solrClient;
 
   private final List<SolrInputDocument> systemColl = Collections.synchronizedList(new ArrayList<>());
   private final ExecutorService simCloudManagerPool;
@@ -103,11 +101,14 @@ public class SimCloudManager implements SolrCloudManager {
 
 
   private Overseer.OverseerThread triggerThread;
+  private ThreadGroup triggerThreadGroup;
+  private SolrResourceLoader loader;
 
   private static int nodeIdPort = 10000;
 
   public SimCloudManager(TimeSource timeSource) throws Exception {
     this.stateManager = new SimDistribStateManager();
+    this.loader = new SolrResourceLoader();
     // init common paths
     stateManager.makePath(ZkStateReader.CLUSTER_STATE);
     stateManager.makePath(ZkStateReader.CLUSTER_PROPS);
@@ -120,22 +121,18 @@ public class SimCloudManager implements SolrCloudManager {
     stateManager.makePath(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH);
 
     this.timeSource = timeSource != null ? timeSource : TimeSource.NANO_TIME;
-    this.clusterStateProvider = new SimClusterStateProvider(liveNodes, this);
-    this.nodeStateProvider = new SimNodeStateProvider(liveNodes, this.stateManager, this.clusterStateProvider, null);
+    this.clusterStateProvider = new SimClusterStateProvider(liveNodesSet, this);
+    this.nodeStateProvider = new SimNodeStateProvider(liveNodesSet, this.stateManager, this.clusterStateProvider, null);
     this.queueFactory = new GenericDistributedQueueFactory(stateManager);
     this.simCloudManagerPool = ExecutorUtil.newMDCAwareFixedThreadPool(200, new DefaultSolrThreadFactory("simCloudManagerPool"));
-    this.autoScalingHandler = new AutoScalingHandler(this, new SolrResourceLoader());
-    ThreadGroup triggerThreadGroup = new ThreadGroup("Simulated Overseer autoscaling triggers");
-    OverseerTriggerThread trigger = new OverseerTriggerThread(new SolrResourceLoader(), this,
+    this.autoScalingHandler = new AutoScalingHandler(this, loader);
+    triggerThreadGroup = new ThreadGroup("Simulated Overseer autoscaling triggers");
+    OverseerTriggerThread trigger = new OverseerTriggerThread(loader, this,
         new CloudConfig.CloudConfigBuilder("nonexistent", 0, "sim").build());
     triggerThread = new Overseer.OverseerThread(triggerThreadGroup, trigger, "Simulated OverseerAutoScalingTriggerThread");
     triggerThread.start();
   }
 
-  public void setSolrClient(SolrClient solrClient) {
-    this.solrClient = solrClient;
-  }
-
   // ---------- simulator setup methods -----------
 
   public static SimCloudManager createCluster(int numNodes, TimeSource timeSource) throws Exception {
@@ -213,6 +210,10 @@ public class SimCloudManager implements SolrCloudManager {
     return values;
   }
 
+  public SolrResourceLoader getLoader() {
+    return loader;
+  }
+
   /**
    * Add a new node and initialize its node values (metrics).
    * @return new node id
@@ -250,7 +251,7 @@ public class SimCloudManager implements SolrCloudManager {
    * @param random random
    */
   public void simRemoveRandomNodes(int number, boolean withValues, Random random) throws Exception {
-    List<String> nodes = new ArrayList<>(liveNodes);
+    List<String> nodes = new ArrayList<>(liveNodesSet.get());
     Collections.shuffle(nodes, random);
     int count = Math.min(number, nodes.size());
     for (int i = 0; i < count; i++) {
@@ -279,22 +280,37 @@ public class SimCloudManager implements SolrCloudManager {
    * @return simulated SolrClient.
    */
   public SolrClient simGetSolrClient() {
-    if (solrClient != null) {
-      return solrClient;
-    } else {
-      return new SolrClient() {
-        @Override
-        public NamedList<Object> request(SolrRequest request, String collection) throws SolrServerException, IOException {
-          SolrResponse rsp = SimCloudManager.this.request(request);
-          return rsp.getResponse();
-        }
-
-        @Override
-        public void close() throws IOException {
-
-        }
-      };
+    return new SolrClient() {
+      @Override
+      public NamedList<Object> request(SolrRequest request, String collection) throws SolrServerException, IOException {
+        SolrResponse rsp = SimCloudManager.this.request(request);
+        return rsp.getResponse();
+      }
+
+      @Override
+      public void close() throws IOException {
+
+      }
+    };
+  }
+
+  /**
+   * Simulate the effect of restarting Overseer leader - in this case this means restarting the
+   * OverseerTriggerThread.
+   * @param killNodeId optional nodeId to kill. If null then don't kill any node, just restart the thread
+   */
+  public void simRestartOverseer(String killNodeId) throws Exception {
+    LOG.info("=== Restarting OverseerTriggerThread...");
+    IOUtils.closeQuietly(triggerThread);
+    triggerThread.interrupt();
+    if (killNodeId != null) {
+      simRemoveNode(killNodeId, true);
     }
+    OverseerTriggerThread trigger = new OverseerTriggerThread(loader, this,
+        new CloudConfig.CloudConfigBuilder("nonexistent", 0, "sim").build());
+    triggerThread = new Overseer.OverseerThread(triggerThreadGroup, trigger, "Simulated OverseerAutoScalingTriggerThread");
+    triggerThread.start();
+
   }
 
   /**
@@ -319,6 +335,10 @@ public class SimCloudManager implements SolrCloudManager {
     return stateManager;
   }
 
+  public LiveNodesSet getLiveNodesSet() {
+    return liveNodesSet;
+  }
+
   public Map<String, AtomicLong> simGetOpCounts() {
     return opCounts;
   }
@@ -363,19 +383,11 @@ public class SimCloudManager implements SolrCloudManager {
 
   @Override
   public SolrResponse request(SolrRequest req) throws IOException {
-    if (solrClient != null) {
-      try {
-        return req.process(solrClient);
-      } catch (SolrServerException e) {
-        throw new IOException(e);
-      }
-    } else {
-      try {
-        Future<SolrResponse> res = submit(() -> simHandleSolrRequest(req));
-        return res.get();
-      } catch (Exception e) {
-        throw new IOException(e);
-      }
+    try {
+      Future<SolrResponse> res = submit(() -> simHandleSolrRequest(req));
+      return res.get();
+    } catch (Exception e) {
+      throw new IOException(e);
     }
   }
 
@@ -499,8 +511,8 @@ public class SimCloudManager implements SolrCloudManager {
           if (req.getParams().get(CommonAdminParams.ASYNC) != null) {
             results.add(REQUESTID, req.getParams().get(CommonAdminParams.ASYNC));
           }
-          if (!liveNodes.isEmpty()) {
-            results.add("leader", liveNodes.iterator().next());
+          if (!liveNodesSet.get().isEmpty()) {
+            results.add("leader", liveNodesSet.get().iterator().next());
           }
           results.add("overseer_queue_size", 0);
           results.add("overseer_work_queue_size", 0);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index d51c059..e63e8ba 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -38,10 +38,13 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
+import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion;
+import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.cloud.ActionThrottle;
@@ -67,6 +70,7 @@ import org.apache.solr.common.params.CommonAdminParams;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
+import org.apache.zookeeper.CreateMode;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -82,8 +86,8 @@ public class SimClusterStateProvider implements ClusterStateProvider {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Map<String, List<ReplicaInfo>> nodeReplicaMap = new ConcurrentHashMap<>();
-  private final Set<String> liveNodes;
-  private final DistribStateManager stateManager;
+  private final LiveNodesSet liveNodes;
+  private final SimDistribStateManager stateManager;
   private final SimCloudManager cloudManager;
 
   private final Map<String, Object> clusterProperties = new ConcurrentHashMap<>();
@@ -109,10 +113,13 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * The instance needs to be initialized using the <code>sim*</code> methods in order
    * to ensure proper behavior, otherwise it will behave as a cluster with zero replicas.
    */
-  public SimClusterStateProvider(Set<String> liveNodes, SimCloudManager cloudManager) {
+  public SimClusterStateProvider(LiveNodesSet liveNodes, SimCloudManager cloudManager) throws Exception {
     this.liveNodes = liveNodes;
+    for (String nodeId : liveNodes.get()) {
+      createEphemeralLiveNode(nodeId);
+    }
     this.cloudManager = cloudManager;
-    this.stateManager = cloudManager.getDistribStateManager();
+    this.stateManager = cloudManager.getSimDistribStateManager();
     this.leaderThrottle = new ActionThrottle("leader", 5000, cloudManager.getTimeSource());
     // names are CollectionAction names, delays are in ms (simulated time)
     defaultOpDelays.put(CollectionParams.CollectionAction.MOVEREPLICA.name(), 5000L);
@@ -138,7 +145,18 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       sliceProperties.clear();
       nodeReplicaMap.clear();
       liveNodes.clear();
+      for (String nodeId : stateManager.listData(ZkStateReader.LIVE_NODES_ZKNODE)) {
+        if (stateManager.hasData(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId)) {
+          stateManager.removeData(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId, -1);
+        }
+        if (stateManager.hasData(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + nodeId)) {
+          stateManager.removeData(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + nodeId, -1);
+        }
+      }
       liveNodes.addAll(initialState.getLiveNodes());
+      for (String nodeId : liveNodes.get()) {
+        createEphemeralLiveNode(nodeId);
+      }
       initialState.forEachCollection(dc -> {
         collProperties.computeIfAbsent(dc.getName(), name -> new ConcurrentHashMap<>()).putAll(dc.getProperties());
         opDelays.computeIfAbsent(dc.getName(), c -> new HashMap<>()).putAll(defaultOpDelays);
@@ -147,7 +165,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
               .computeIfAbsent(s.getName(), name -> new HashMap<>()).putAll(s.getProperties());
           s.getReplicas().forEach(r -> {
             ReplicaInfo ri = new ReplicaInfo(r.getName(), r.getCoreName(), dc.getName(), s.getName(), r.getType(), r.getNodeName(), r.getProperties());
-            if (liveNodes.contains(r.getNodeName())) {
+            if (liveNodes.get().contains(r.getNodeName())) {
               nodeReplicaMap.computeIfAbsent(r.getNodeName(), rn -> new ArrayList<>()).add(ri);
             }
           });
@@ -175,11 +193,10 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     if (liveNodes.isEmpty()) {
       return null;
     }
-    List<String> nodes = new ArrayList<>(liveNodes);
+    List<String> nodes = new ArrayList<>(liveNodes.get());
     return nodes.get(random.nextInt(nodes.size()));
   }
 
-  // todo: maybe hook up DistribStateManager /live_nodes ?
   // todo: maybe hook up DistribStateManager /clusterstate.json watchers?
 
   /**
@@ -191,6 +208,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       throw new Exception("Node " + nodeId + " already exists");
     }
     liveNodes.add(nodeId);
+    createEphemeralLiveNode(nodeId);
     nodeReplicaMap.putIfAbsent(nodeId, new ArrayList<>());
   }
 
@@ -218,7 +236,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
   }
 
-  // todo: maybe hook up DistribStateManager /live_nodes ?
   // todo: maybe hook up DistribStateManager /clusterstate.json watchers?
 
   /**
@@ -234,6 +251,13 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       // mark every replica on that node as down
       setReplicaStates(nodeId, Replica.State.DOWN, collections);
       boolean res = liveNodes.remove(nodeId);
+      // remove ephemeral nodes
+      stateManager.getRoot().removeEphemeralChildren(nodeId);
+      // create a nodeLost marker if needed
+      AutoScalingConfig cfg = stateManager.getAutoScalingConfig(null);
+      if (cfg.hasTriggerForEvents(TriggerEventType.NODELOST)) {
+        stateManager.makePath(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + nodeId);
+      }
       if (!collections.isEmpty()) {
         cloudManager.submit(new LeaderElection(collections, true));
       }
@@ -254,8 +278,19 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
   }
 
+  // NOTE: this method must be called while holding the lock
+  private void createEphemeralLiveNode(String nodeId) throws Exception {
+    DistribStateManager mgr = stateManager.withEphemeralId(nodeId);
+    mgr.makePath(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId, null, CreateMode.EPHEMERAL, true);
+    AutoScalingConfig cfg = stateManager.getAutoScalingConfig(null);
+    if (cfg.hasTriggerForEvents(TriggerEventType.NODEADDED)) {
+      mgr.makePath(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + nodeId, null, CreateMode.EPHEMERAL, true);
+    }
+  }
+
   public boolean simRestoreNode(String nodeId) throws Exception {
     liveNodes.add(nodeId);
+    createEphemeralLiveNode(nodeId);
     Set<String> collections = new HashSet<>();
     lock.lock();
     try {
@@ -307,8 +342,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     simAddReplica(message.getStr(CoreAdminParams.NODE), ri, true);
   }
 
-  // todo: maybe hook up DistribStateManager /clusterstate.json watchers?
-
   /**
    * Add a replica. Note that all details of the replica must be present here, including
    * node, coreNodeName and SolrCore name.
@@ -376,8 +409,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
   }
 
-  // todo: maybe hook up DistribStateManager /clusterstate.json watchers?
-
   /**
    * Remove replica.
    * @param nodeId node id
@@ -485,7 +516,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
               if (ri.getVariables().remove(ZkStateReader.LEADER_PROP) != null) {
                 stateChanged.set(true);
               }
-              if (r.isActive(liveNodes)) {
+              if (r.isActive(liveNodes.get())) {
                 active.add(ri);
               } else { // if it's on a node that is not live mark it down
                 if (!liveNodes.contains(r.getNodeName())) {
@@ -1055,10 +1086,15 @@ public class SimClusterStateProvider implements ClusterStateProvider {
   /**
    * Return all replica infos for a node.
    * @param node node id
-   * @return list of replicas on that node
+   * @return list of replicas on that node, or empty list if none
    */
   public List<ReplicaInfo> simGetReplicaInfos(String node) {
-    return nodeReplicaMap.get(node);
+    List<ReplicaInfo> replicas = nodeReplicaMap.get(node);
+    if (replicas == null) {
+      return Collections.emptyList();
+    } else {
+      return replicas;
+    }
   }
 
   /**
@@ -1091,7 +1127,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
 
   @Override
   public Set<String> getLiveNodes() {
-    return liveNodes;
+    return liveNodes.get();
   }
 
   @Override
@@ -1101,7 +1137,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
 
   @Override
   public ClusterState getClusterState() throws IOException {
-    return new ClusterState(0, liveNodes, getCollectionStates());
+    return new ClusterState(0, liveNodes.get(), getCollectionStates());
   }
 
   private Map<String, DocCollection> getCollectionStates() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
index a570e6d..12b38e7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
@@ -239,6 +239,23 @@ public class SimDistribStateManager implements DistribStateManager {
     this.errorRef.set(actionError);
   }
 
+  private SimDistribStateManager(String id, ExecutorService watchersPool, Node root, ActionThrottle actionThrottle,
+                                 ActionError actionError) {
+    this.id = id;
+    this.watchersPool = watchersPool;
+    this.root = root;
+    this.throttleRef.set(actionThrottle);
+    this.errorRef.set(actionError);
+  }
+
+  public SimDistribStateManager withEphemeralId(String id) {
+    return new SimDistribStateManager(id, watchersPool, root, throttleRef.get(), errorRef.get());
+  }
+
+  public Node getRoot() {
+    return root;
+  }
+
   public void clear() {
     root.clear();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
index 65fb5b9..a9b3b5b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
@@ -47,12 +47,12 @@ public class SimNodeStateProvider implements NodeStateProvider {
   private final Map<String, Map<String, Object>> nodeValues = new ConcurrentHashMap<>();
   private final SimClusterStateProvider clusterStateProvider;
   private final SimDistribStateManager stateManager;
-  private final Set<String> liveNodes;
+  private final LiveNodesSet liveNodesSet;
 
-  public SimNodeStateProvider(Set<String> liveNodes, SimDistribStateManager stateManager,
+  public SimNodeStateProvider(LiveNodesSet liveNodesSet, SimDistribStateManager stateManager,
                               SimClusterStateProvider clusterStateProvider,
                               Map<String, Map<String, Object>> nodeValues) {
-    this.liveNodes = liveNodes;
+    this.liveNodesSet = liveNodesSet;
     this.stateManager = stateManager;
     this.clusterStateProvider = clusterStateProvider;
     if (nodeValues != null) {
@@ -226,7 +226,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
   @Override
   public Map<String, Object> getNodeValues(String node, Collection<String> tags) {
     LOG.trace("-- requested values for " + node + ": " + tags);
-    if (!liveNodes.contains(node)) {
+    if (!liveNodesSet.contains(node)) {
       nodeValues.remove(node);
       return Collections.emptyMap();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
index 38dd7dc..cb3bb4c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
@@ -169,15 +169,30 @@ public class TestClusterStateProvider extends SolrCloudTestCase {
   @Test
   public void testAddRemoveNode() throws Exception {
     Set<String> lastNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
+    List<String> liveNodes = cloudManager.getDistribStateManager().listData(ZkStateReader.LIVE_NODES_ZKNODE);
+    assertEquals(lastNodes.size(), liveNodes.size());
+    liveNodes.removeAll(lastNodes);
+    assertTrue(liveNodes.isEmpty());
+
     String node = addNode();
-    Thread.sleep(2000);
+    cloudManager.getTimeSource().sleep(2000);
     assertFalse(lastNodes.contains(node));
-    assertTrue(cloudManager.getClusterStateProvider().getLiveNodes().contains(node));
+    lastNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
+    assertTrue(lastNodes.contains(node));
+    liveNodes = cloudManager.getDistribStateManager().listData(ZkStateReader.LIVE_NODES_ZKNODE);
+    assertEquals(lastNodes.size(), liveNodes.size());
+    liveNodes.removeAll(lastNodes);
+    assertTrue(liveNodes.isEmpty());
+
     node = deleteNode();
-    Thread.sleep(2000);
+    cloudManager.getTimeSource().sleep(2000);
     assertTrue(lastNodes.contains(node));
-    assertFalse(cloudManager.getClusterStateProvider().getLiveNodes().contains(node));
-  }
+    lastNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
+    assertFalse(lastNodes.contains(node));
+    liveNodes = cloudManager.getDistribStateManager().listData(ZkStateReader.LIVE_NODES_ZKNODE);
+    assertEquals(lastNodes.size(), liveNodes.size());
+    liveNodes.removeAll(lastNodes);
+    assertTrue(liveNodes.isEmpty());  }
 
   @Test
   public void testAutoScalingConfig() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeAddedTrigger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeAddedTrigger.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeAddedTrigger.java
new file mode 100644
index 0000000..c1f10d0
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeAddedTrigger.java
@@ -0,0 +1,306 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling.sim;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.solr.cloud.autoscaling.ActionContext;
+import org.apache.solr.cloud.autoscaling.AutoScaling;
+import org.apache.solr.cloud.autoscaling.NodeAddedTrigger;
+import org.apache.solr.cloud.autoscaling.TriggerAction;
+import org.apache.solr.cloud.autoscaling.TriggerEvent;
+import org.apache.solr.common.util.TimeSource;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test for {@link NodeAddedTrigger}
+ */
+public class TestNodeAddedTrigger extends SimSolrCloudTestCase {
+  private static AtomicBoolean actionConstructorCalled = new AtomicBoolean(false);
+  private static AtomicBoolean actionInitCalled = new AtomicBoolean(false);
+  private static AtomicBoolean actionCloseCalled = new AtomicBoolean(false);
+
+  private AutoScaling.TriggerEventProcessor noFirstRunProcessor = event -> {
+    fail("Did not expect the listener to fire on first run!");
+    return true;
+  };
+
+  private static int SPEED = 50;
+
+  // currentTimeMillis is not very precise, so to avoid false positives when comparing firing times we add a small delta
+  private static final long WAIT_FOR_DELTA_NANOS = TimeUnit.MILLISECONDS.toNanos(2);
+
+  private static TimeSource timeSource;
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    cluster = SimCloudManager.createCluster(1, TimeSource.get("simTime:" + SPEED));
+    timeSource = cluster.getTimeSource();
+  }
+
+  @Before
+  public void beforeTest() throws Exception {
+    actionConstructorCalled = new AtomicBoolean(false);
+    actionInitCalled = new AtomicBoolean(false);
+    actionCloseCalled = new AtomicBoolean(false);
+  }
+
+  @Test
+  public void testTrigger() throws Exception {
+    long waitForSeconds = 1 + random().nextInt(5);
+    Map<String, Object> props = createTriggerProps(waitForSeconds);
+
+    try (NodeAddedTrigger trigger = new NodeAddedTrigger("node_added_trigger", props, cluster.getLoader(), cluster)) {
+      trigger.setProcessor(noFirstRunProcessor);
+      trigger.run();
+
+      String newNode1 = cluster.simAddNode();
+      String newNode2 = cluster.simAddNode();
+      AtomicBoolean fired = new AtomicBoolean(false);
+      AtomicReference<TriggerEvent> eventRef = new AtomicReference<>();
+      trigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          eventRef.set(event);
+          long currentTimeNanos = timeSource.getTime();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("NodeAddedListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" +  eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("NodeAddedTrigger was fired more than once!");
+        }
+        return true;
+      });
+      int counter = 0;
+      do {
+        trigger.run();
+        timeSource.sleep(1000);
+        if (counter++ > 10) {
+          fail("Newly added node was not discovered by trigger even after 10 seconds");
+        }
+      } while (!fired.get());
+
+      TriggerEvent nodeAddedEvent = eventRef.get();
+      assertNotNull(nodeAddedEvent);
+      List<String> nodeNames = (List<String>)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES);
+      assertTrue(nodeNames.contains(newNode1));
+      assertTrue(nodeNames.contains(newNode2));
+    }
+
+    // add a new node but remove it before the waitFor period expires
+    // and assert that the trigger doesn't fire at all
+    try (NodeAddedTrigger trigger = new NodeAddedTrigger("node_added_trigger", props, cluster.getLoader(), cluster)) {
+      final long waitTime = 2;
+      props.put("waitFor", waitTime);
+      trigger.setProcessor(noFirstRunProcessor);
+      trigger.run();
+
+      String newNode = cluster.simAddNode();
+      AtomicBoolean fired = new AtomicBoolean(false);
+      trigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          long currentTimeNanos = timeSource.getTime();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("NodeAddedListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" +  eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("NodeAddedTrigger was fired more than once!");
+        }
+        return true;
+      });
+      trigger.run(); // first run should detect the new node
+      cluster.simRemoveNode(newNode, true);
+      int counter = 0;
+      do {
+        trigger.run();
+        timeSource.sleep(1000);
+        if (counter++ > waitTime + 1) { // run it a little more than the wait time
+          break;
+        }
+      } while (true);
+
+      // ensure the event was not fired
+      assertFalse(fired.get());
+    }
+  }
+
+  public void testActionLifecycle() throws Exception {
+    Map<String, Object> props = createTriggerProps(0);
+    List<Map<String, String>> actions = (List<Map<String, String>>) props.get("actions");
+    Map<String, String> action = new HashMap<>(2);
+    action.put("name", "testActionInit");
+    action.put("class", TestNodeAddedTrigger.AssertInitTriggerAction.class.getName());
+    actions.add(action);
+    try (NodeAddedTrigger trigger = new NodeAddedTrigger("node_added_trigger", props, cluster.getLoader(), cluster)) {
+      assertEquals(true, actionConstructorCalled.get());
+      assertEquals(false, actionInitCalled.get());
+      assertEquals(false, actionCloseCalled.get());
+      trigger.init();
+      assertEquals(true, actionInitCalled.get());
+      assertEquals(false, actionCloseCalled.get());
+    }
+    assertEquals(true, actionCloseCalled.get());
+  }
+
+  public static class AssertInitTriggerAction implements TriggerAction  {
+    public AssertInitTriggerAction() {
+      actionConstructorCalled.set(true);
+    }
+
+    @Override
+    public String getName() {
+      return "";
+    }
+
+    @Override
+    public void process(TriggerEvent event, ActionContext actionContext) {
+
+    }
+
+    @Override
+    public void close() throws IOException {
+      actionCloseCalled.compareAndSet(false, true);
+    }
+
+    @Override
+    public void init(Map<String, String> args) {
+      actionInitCalled.compareAndSet(false, true);
+    }
+  }
+
+  @Test
+  public void testListenerAcceptance() throws Exception {
+    Map<String, Object> props = createTriggerProps(0);
+    try (NodeAddedTrigger trigger = new NodeAddedTrigger("node_added_trigger", props, cluster.getLoader(), cluster)) {
+      trigger.setProcessor(noFirstRunProcessor);
+      trigger.run(); // starts tracking live nodes
+
+      String newNode = cluster.simAddNode();
+      AtomicInteger callCount = new AtomicInteger(0);
+      AtomicBoolean fired = new AtomicBoolean(false);
+
+      trigger.setProcessor(event -> {
+        if (callCount.incrementAndGet() < 2) {
+          return false;
+        } else  {
+          fired.compareAndSet(false, true);
+          return true;
+        }
+      });
+
+      trigger.run(); // first run should detect the new node and fire immediately but listener isn't ready
+      assertEquals(1, callCount.get());
+      assertFalse(fired.get());
+      trigger.run(); // second run should again fire
+      assertEquals(2, callCount.get());
+      assertTrue(fired.get());
+      trigger.run(); // should not fire
+      assertEquals(2, callCount.get());
+    }
+  }
+
+  @Test
+  public void testRestoreState() throws Exception {
+    long waitForSeconds = 1 + random().nextInt(5);
+    Map<String, Object> props = createTriggerProps(waitForSeconds);
+
+    // add a new node but update the trigger before the waitFor period expires
+    // and assert that the new trigger still fires
+    NodeAddedTrigger trigger = new NodeAddedTrigger("node_added_trigger", props, cluster.getLoader(), cluster);
+    trigger.setProcessor(noFirstRunProcessor);
+    trigger.run();
+
+    String newNode = cluster.simAddNode();
+    trigger.run(); // this run should detect the new node
+    trigger.close(); // close the old trigger
+
+    try (NodeAddedTrigger newTrigger = new NodeAddedTrigger("some_different_name", props, cluster.getLoader(), cluster))  {
+      try {
+        newTrigger.restoreState(trigger);
+        fail("Trigger should only be able to restore state from an old trigger of the same name");
+      } catch (AssertionError e) {
+        // expected
+      }
+    }
+
+    try (NodeAddedTrigger newTrigger = new NodeAddedTrigger("node_added_trigger", props, cluster.getLoader(), cluster))  {
+      AtomicBoolean fired = new AtomicBoolean(false);
+      AtomicReference<TriggerEvent> eventRef = new AtomicReference<>();
+      newTrigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          eventRef.set(event);
+          long currentTimeNanos = timeSource.getTime();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("NodeAddedListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" +  eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("NodeAddedTrigger was fired more than once!");
+        }
+        return true;
+      });
+      newTrigger.restoreState(trigger); // restore state from the old trigger
+      int counter = 0;
+      do {
+        newTrigger.run();
+        timeSource.sleep(1000);
+        if (counter++ > 10) {
+          fail("Newly added node was not discovered by trigger even after 10 seconds");
+        }
+      } while (!fired.get());
+
+      // ensure the event was fired
+      assertTrue(fired.get());
+      TriggerEvent nodeAddedEvent = eventRef.get();
+      assertNotNull(nodeAddedEvent);
+      //TODO assertEquals("", newNode.getNodeName(), nodeAddedEvent.getProperty(NodeAddedTrigger.NodeAddedEvent.NODE_NAME));
+    }
+  }
+
+  private Map<String, Object> createTriggerProps(long waitForSeconds) {
+    Map<String, Object> props = new HashMap<>();
+    props.put("event", "nodeLost");
+    props.put("waitFor", waitForSeconds);
+    props.put("enabled", true);
+    List<Map<String, String>> actions = new ArrayList<>(3);
+    Map<String, String> map = new HashMap<>(2);
+    map.put("name", "compute_plan");
+    map.put("class", "solr.ComputePlanAction");
+    actions.add(map);
+    map = new HashMap<>(2);
+    map.put("name", "execute_plan");
+    map.put("class", "solr.ExecutePlanAction");
+    actions.add(map);
+    props.put("actions", actions);
+    return props;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeLostTrigger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeLostTrigger.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeLostTrigger.java
new file mode 100644
index 0000000..18ee355
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestNodeLostTrigger.java
@@ -0,0 +1,334 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling.sim;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.cloud.autoscaling.ActionContext;
+import org.apache.solr.cloud.autoscaling.AutoScaling;
+import org.apache.solr.cloud.autoscaling.NodeLostTrigger;
+import org.apache.solr.cloud.autoscaling.TriggerAction;
+import org.apache.solr.cloud.autoscaling.TriggerEvent;
+import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.core.CoreContainer;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test for {@link NodeLostTrigger}
+ */
+public class TestNodeLostTrigger extends SimSolrCloudTestCase {
+  private static AtomicBoolean actionConstructorCalled = new AtomicBoolean(false);
+  private static AtomicBoolean actionInitCalled = new AtomicBoolean(false);
+  private static AtomicBoolean actionCloseCalled = new AtomicBoolean(false);
+
+  private AutoScaling.TriggerEventProcessor noFirstRunProcessor = event -> {
+    fail("Did not expect the listener to fire on first run!");
+    return true;
+  };
+
+  private static final int SPEED = 50;
+  // use the same time source as the trigger
+  private static TimeSource timeSource;
+  // currentTimeMillis is not as precise so to avoid false positives while comparing time of fire, we add some delta
+  private static final long WAIT_FOR_DELTA_NANOS = TimeUnit.MILLISECONDS.toNanos(5);
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    cluster = SimCloudManager.createCluster(5, TimeSource.get("simTime:" + SPEED));
+    timeSource = cluster.getTimeSource();
+  }
+
+  @Before
+  public void beforeTest() throws Exception {
+    actionConstructorCalled = new AtomicBoolean(false);
+    actionInitCalled = new AtomicBoolean(false);
+    actionCloseCalled = new AtomicBoolean(false);
+  }
+
+  @Test
+  public void testTrigger() throws Exception {
+    long waitForSeconds = 1 + random().nextInt(5);
+    Map<String, Object> props = createTriggerProps(waitForSeconds);
+
+    try (NodeLostTrigger trigger = new NodeLostTrigger("node_lost_trigger", props, cluster.getLoader(), cluster)) {
+      trigger.setProcessor(noFirstRunProcessor);
+      trigger.run();
+      Iterator<String> it = cluster.getLiveNodesSet().get().iterator();
+      String lostNodeName1 = it.next();
+      String lostNodeName2 = it.next();
+      cluster.simRemoveNode(lostNodeName1, true);
+      cluster.simRemoveNode(lostNodeName2, true);
+      timeSource.sleep(1000);
+
+      AtomicBoolean fired = new AtomicBoolean(false);
+      AtomicReference<TriggerEvent> eventRef = new AtomicReference<>();
+      trigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          eventRef.set(event);
+          long currentTimeNanos = timeSource.getTime();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("NodeLostListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" +  eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("NodeLostListener was fired more than once!");
+        }
+        return true;
+      });
+      int counter = 0;
+      do {
+        trigger.run();
+        timeSource.sleep(1000);
+        if (counter++ > 10) {
+          fail("Lost node was not discovered by trigger even after 10 seconds");
+        }
+      } while (!fired.get());
+
+      TriggerEvent nodeLostEvent = eventRef.get();
+      assertNotNull(nodeLostEvent);
+      List<String> nodeNames = (List<String>)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES);
+      assertTrue(nodeNames + " doesn't contain " + lostNodeName1, nodeNames.contains(lostNodeName1));
+      assertTrue(nodeNames + " doesn't contain " + lostNodeName2, nodeNames.contains(lostNodeName2));
+
+    }
+
+    // remove a node but add it back before the waitFor period expires
+    // and assert that the trigger doesn't fire at all
+    try (NodeLostTrigger trigger = new NodeLostTrigger("node_lost_trigger", props, cluster.getLoader(), cluster)) {
+      final long waitTime = 2;
+      props.put("waitFor", waitTime);
+      trigger.setProcessor(noFirstRunProcessor);
+      trigger.run();
+
+      String lostNode = cluster.getSimClusterStateProvider().simGetRandomNode(random());
+      cluster.simRemoveNode(lostNode, false);
+      AtomicBoolean fired = new AtomicBoolean(false);
+      trigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          long currentTimeNanos = timeSource.getTime();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitTime, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("NodeLostListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" +  eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("NodeLostListener was fired more than once!");
+        }
+        return true;
+      });
+      trigger.run(); // first run should detect the lost node
+      int counter = 0;
+      do {
+        if (cluster.getLiveNodesSet().get().size() == 2) {
+          break;
+        }
+        timeSource.sleep(100);
+        if (counter++ > 20) {
+          fail("Live nodes not updated!");
+        }
+      } while (true);
+      counter = 0;
+      cluster.getSimClusterStateProvider().simRestoreNode(lostNode);
+      do {
+        trigger.run();
+        timeSource.sleep(1000);
+        if (counter++ > waitTime + 1) { // run it a little more than the wait time
+          break;
+        }
+      } while (true);
+
+      // ensure the event was not fired
+      assertFalse(fired.get());
+    }
+  }
+
+  public void testActionLifecycle() throws Exception {
+    Map<String, Object> props = createTriggerProps(0);
+    List<Map<String, String>> actions = (List<Map<String, String>>) props.get("actions");
+    Map<String, String> action = new HashMap<>(2);
+    action.put("name", "testActionInit");
+    action.put("class", AssertInitTriggerAction.class.getName());
+    actions.add(action);
+    try (NodeLostTrigger trigger = new NodeLostTrigger("node_added_trigger", props, cluster.getLoader(), cluster)) {
+      assertEquals(true, actionConstructorCalled.get());
+      assertEquals(false, actionInitCalled.get());
+      assertEquals(false, actionCloseCalled.get());
+      trigger.init();
+      assertEquals(true, actionInitCalled.get());
+      assertEquals(false, actionCloseCalled.get());
+    }
+    assertEquals(true, actionCloseCalled.get());
+  }
+
+  public static class AssertInitTriggerAction implements TriggerAction  {
+    public AssertInitTriggerAction() {
+      actionConstructorCalled.set(true);
+    }
+
+    @Override
+    public String getName() {
+      return "";
+    }
+
+    @Override
+    public void process(TriggerEvent event, ActionContext actionContext) {
+
+    }
+
+    @Override
+    public void close() throws IOException {
+      actionCloseCalled.compareAndSet(false, true);
+    }
+
+    @Override
+    public void init(Map<String, String> args) {
+      actionInitCalled.compareAndSet(false, true);
+    }
+  }
+
+  @Test
+  public void testListenerAcceptance() throws Exception {
+    Map<String, Object> props = createTriggerProps(0);
+    try (NodeLostTrigger trigger = new NodeLostTrigger("node_added_trigger", props, cluster.getLoader(), cluster)) {
+      trigger.setProcessor(noFirstRunProcessor);
+
+      String newNode = cluster.simAddNode();
+
+      trigger.run(); // starts tracking live nodes
+
+      // stop the newly created node
+      cluster.simRemoveNode(newNode, true);
+
+      AtomicInteger callCount = new AtomicInteger(0);
+      AtomicBoolean fired = new AtomicBoolean(false);
+
+      trigger.setProcessor(event -> {
+        if (callCount.incrementAndGet() < 2) {
+          return false;
+        } else  {
+          fired.compareAndSet(false, true);
+          return true;
+        }
+      });
+
+      trigger.run(); // first run should detect the lost node and fire immediately but listener isn't ready
+      assertEquals(1, callCount.get());
+      assertFalse(fired.get());
+      trigger.run(); // second run should again fire
+      assertEquals(2, callCount.get());
+      assertTrue(fired.get());
+      trigger.run(); // should not fire
+      assertEquals(2, callCount.get());
+    }
+  }
+
+  @Test
+  public void testRestoreState() throws Exception {
+    long waitForSeconds = 1 + random().nextInt(5);
+    Map<String, Object> props = createTriggerProps(waitForSeconds);
+
+    String newNode = cluster.simAddNode();
+
+    // remove a node but update the trigger before the waitFor period expires
+    // and assert that the new trigger still fires
+
+    NodeLostTrigger trigger = new NodeLostTrigger("node_lost_trigger", props, cluster.getLoader(), cluster);
+    trigger.setProcessor(noFirstRunProcessor);
+    trigger.run();
+
+    // stop the newly created node
+    cluster.simRemoveNode(newNode, true);
+
+    trigger.run(); // this run should detect the lost node
+    trigger.close(); // close the old trigger
+
+    try (NodeLostTrigger newTrigger = new NodeLostTrigger("some_different_name", props, cluster.getLoader(), cluster))  {
+      try {
+        newTrigger.restoreState(trigger);
+        fail("Trigger should only be able to restore state from an old trigger of the same name");
+      } catch (AssertionError e) {
+        // expected
+      }
+    }
+
+    try (NodeLostTrigger newTrigger = new NodeLostTrigger("node_lost_trigger", props, cluster.getLoader(), cluster)) {
+      AtomicBoolean fired = new AtomicBoolean(false);
+      AtomicReference<TriggerEvent> eventRef = new AtomicReference<>();
+      newTrigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          eventRef.set(event);
+          long currentTimeNanos = timeSource.getTime();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("NodeLostListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" + eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("NodeLostListener was fired more than once!");
+        }
+        return true;
+      });
+      newTrigger.restoreState(trigger); // restore state from the old trigger
+      int counter = 0;
+      do {
+        newTrigger.run();
+        timeSource.sleep(1000);
+        if (counter++ > 10) {
+          fail("Lost node was not discovered by trigger even after 10 seconds");
+        }
+      } while (!fired.get());
+
+      TriggerEvent nodeLostEvent = eventRef.get();
+      assertNotNull(nodeLostEvent);
+      List<String> nodeNames = (List<String>)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES);
+      assertTrue(nodeNames.contains(newNode));
+    }
+  }
+
+  private Map<String, Object> createTriggerProps(long waitForSeconds) {
+    Map<String, Object> props = new HashMap<>();
+    props.put("event", "nodeLost");
+    props.put("waitFor", waitForSeconds);
+    props.put("enabled", true);
+    List<Map<String, String>> actions = new ArrayList<>(3);
+    Map<String, String> map = new HashMap<>(2);
+    map.put("name", "compute_plan");
+    map.put("class", "solr.ComputePlanAction");
+    actions.add(map);
+    map = new HashMap<>(2);
+    map.put("name", "execute_plan");
+    map.put("class", "solr.ExecutePlanAction");
+    actions.add(map);
+    props.put("actions", actions);
+    return props;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e1c2490/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestTriggerIntegration.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestTriggerIntegration.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestTriggerIntegration.java
index 179af76..e05b789 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestTriggerIntegration.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestTriggerIntegration.java
@@ -20,9 +20,11 @@ package org.apache.solr.cloud.autoscaling.sim;
 import java.lang.invoke.MethodHandles;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.SortedSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -37,14 +39,17 @@ import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventProcessorStage;
+import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.autoscaling.ActionContext;
 import org.apache.solr.cloud.autoscaling.NodeLostTrigger;
 import org.apache.solr.cloud.autoscaling.ScheduledTriggers;
 import org.apache.solr.cloud.autoscaling.TriggerActionBase;
 import org.apache.solr.cloud.autoscaling.TriggerEvent;
+import org.apache.solr.cloud.autoscaling.TriggerEventQueue;
 import org.apache.solr.cloud.autoscaling.TriggerListenerBase;
 import org.apache.solr.cloud.autoscaling.CapturedEvent;
+import org.apache.solr.common.cloud.LiveNodesListener;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.TimeSource;
@@ -602,12 +607,10 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
 
   public static long eventQueueActionWait = 5000;
 
-  // simulation framework doesn't support overseer
-  /*
   @Test
   public void testEventQueue() throws Exception {
     waitForSeconds = 1;
-    CloudSolrClient solrClient = cluster.getSolrClient();
+    SolrClient solrClient = cluster.simGetSolrClient();
     String setTriggerCommand = "{" +
         "'set-trigger' : {" +
         "'name' : 'node_added_trigger1'," +
@@ -616,56 +619,49 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
         "'enabled' : true," +
         "'actions' : [{'name':'test','class':'" + TestEventQueueAction.class.getName() + "'}]" +
         "}}";
-    NamedList<Object> overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
-    String overseerLeader = (String) overSeerStatus.get("leader");
-    int overseerLeaderIndex = 0;
-    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
-      JettySolrRunner jetty = cluster.getJettySolrRunner(i);
-      if (jetty.getNodeName().equals(overseerLeader)) {
-        overseerLeaderIndex = i;
-        break;
-      }
-    }
+
+    String overseerLeader = cluster.getSimClusterStateProvider().simGetRandomNode(random());
+
     SolrRequest req = createAutoScalingRequest(SolrRequest.METHOD.POST, setTriggerCommand);
     NamedList<Object> response = solrClient.request(req);
     assertEquals(response.get("result").toString(), "success");
 
-    if (!actionInitCalled.await(3, TimeUnit.SECONDS))  {
+    if (!actionInitCalled.await(3000 / SPEED, TimeUnit.MILLISECONDS))  {
       fail("The TriggerAction should have been created by now");
     }
 
     // add node to generate the event
-    JettySolrRunner newNode = cluster.startJettySolrRunner();
-    boolean await = actionStarted.await(60, TimeUnit.SECONDS);
+    String newNode = cluster.simAddNode();
+    boolean await = actionStarted.await(60000 / SPEED, TimeUnit.MILLISECONDS);
     assertTrue("action did not start", await);
     // event should be there
-    NodeAddedTrigger.NodeAddedEvent nodeAddedEvent = (NodeAddedTrigger.NodeAddedEvent) events.iterator().next();
+    TriggerEvent nodeAddedEvent = events.iterator().next();
     assertNotNull(nodeAddedEvent);
     // but action did not complete yet so the event is still enqueued
     assertFalse(triggerFired.get());
     events.clear();
     actionStarted = new CountDownLatch(1);
     eventQueueActionWait = 1;
-    // kill overseer leader
-    cluster.stopJettySolrRunner(overseerLeaderIndex);
-    Thread.sleep(5000);
+    // kill overseer
+    cluster.simRestartOverseer(overseerLeader);
+    cluster.getTimeSource().sleep(5000);
     // new overseer leader should be elected and run triggers
-    await = actionInterrupted.await(3, TimeUnit.SECONDS);
+    await = actionInterrupted.await(3000 / SPEED, TimeUnit.MILLISECONDS);
     assertTrue("action wasn't interrupted", await);
     // it should fire again from enqueued event
-    await = actionStarted.await(60, TimeUnit.SECONDS);
+    await = actionStarted.await(60000 / SPEED, TimeUnit.MILLISECONDS);
     assertTrue("action wasn't started", await);
     TriggerEvent replayedEvent = events.iterator().next();
     assertTrue(replayedEvent.getProperty(TriggerEventQueue.ENQUEUE_TIME) != null);
     assertTrue(events + "\n" + replayedEvent.toString(), replayedEvent.getProperty(TriggerEventQueue.DEQUEUE_TIME) != null);
-    await = actionCompleted.await(10, TimeUnit.SECONDS);
+    await = actionCompleted.await(10000 / SPEED, TimeUnit.MILLISECONDS);
     assertTrue("action wasn't completed", await);
     assertTrue(triggerFired.get());
   }
 
   @Test
   public void testEventFromRestoredState() throws Exception {
-    CloudSolrClient solrClient = cluster.getSolrClient();
+    SolrClient solrClient = cluster.simGetSolrClient();
     String setTriggerCommand = "{" +
         "'set-trigger' : {" +
         "'name' : 'node_added_trigger'," +
@@ -678,45 +674,33 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     NamedList<Object> response = solrClient.request(req);
     assertEquals(response.get("result").toString(), "success");
 
-    if (!actionInitCalled.await(10, TimeUnit.SECONDS))  {
+    if (!actionInitCalled.await(10000 / SPEED, TimeUnit.MILLISECONDS))  {
       fail("The TriggerAction should have been created by now");
     }
 
-    NamedList<Object> overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
-    String overseerLeader = (String) overSeerStatus.get("leader");
-    int overseerLeaderIndex = 0;
-    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
-      JettySolrRunner jetty = cluster.getJettySolrRunner(i);
-      if (jetty.getNodeName().equals(overseerLeader)) {
-        overseerLeaderIndex = i;
-        break;
-      }
-    }
-
     events.clear();
 
-    JettySolrRunner newNode = cluster.startJettySolrRunner();
-    boolean await = triggerFiredLatch.await(20, TimeUnit.SECONDS);
+    String newNode = cluster.simAddNode();
+    boolean await = triggerFiredLatch.await(20000 / SPEED, TimeUnit.MILLISECONDS);
     assertTrue("The trigger did not fire at all", await);
     assertTrue(triggerFired.get());
     // reset
     triggerFired.set(false);
     triggerFiredLatch = new CountDownLatch(1);
-    NodeAddedTrigger.NodeAddedEvent nodeAddedEvent = (NodeAddedTrigger.NodeAddedEvent) events.iterator().next();
+    TriggerEvent nodeAddedEvent = events.iterator().next();
     assertNotNull(nodeAddedEvent);
     List<String> nodeNames = (List<String>)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES);
-    assertTrue(nodeNames.contains(newNode.getNodeName()));
+    assertTrue(nodeNames.contains(newNode));
     // add a second node - state of the trigger will change but it won't fire for waitFor sec.
-    JettySolrRunner newNode2 = cluster.startJettySolrRunner();
-    Thread.sleep(10000);
-    // kill overseer leader
-    cluster.stopJettySolrRunner(overseerLeaderIndex);
-    await = triggerFiredLatch.await(20, TimeUnit.SECONDS);
+    String newNode2 = cluster.simAddNode();
+    cluster.getTimeSource().sleep(10000);
+    // kill overseer
+    cluster.simRestartOverseer(null);
+    await = triggerFiredLatch.await(20000 / SPEED, TimeUnit.MILLISECONDS);
     assertTrue("The trigger did not fire at all", await);
     assertTrue(triggerFired.get());
   }
 
-
   private static class TestLiveNodesListener implements LiveNodesListener {
     Set<String> lostNodes = new HashSet<>();
     Set<String> addedNodes = new HashSet<>();
@@ -745,7 +729,7 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
 
   private TestLiveNodesListener registerLiveNodesListener() {
     TestLiveNodesListener listener = new TestLiveNodesListener();
-    zkStateReader.registerLiveNodesListener(listener);
+    cluster.getLiveNodesSet().registerLiveNodesListener(listener);
     return listener;
   }
 
@@ -789,47 +773,42 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     triggerFiredLatch = new CountDownLatch(2);
     TestLiveNodesListener listener = registerLiveNodesListener();
 
-    NamedList<Object> overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
-    String overseerLeader = (String) overSeerStatus.get("leader");
-    int overseerLeaderIndex = 0;
-    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
-      JettySolrRunner jetty = cluster.getJettySolrRunner(i);
-      if (jetty.getNodeName().equals(overseerLeader)) {
-        overseerLeaderIndex = i;
-        break;
-      }
-    }
+    SolrClient solrClient = cluster.simGetSolrClient();
+
+    // pick overseer node
+    String overseerLeader = cluster.getSimClusterStateProvider().simGetRandomNode(random());
+
     // add a node
-    JettySolrRunner node = cluster.startJettySolrRunner();
-    if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) {
+    String node = cluster.simAddNode();
+    if (!listener.onChangeLatch.await(10000 / SPEED, TimeUnit.MILLISECONDS)) {
       fail("onChange listener didn't execute on cluster change");
     }
     assertEquals(1, listener.addedNodes.size());
-    assertEquals(node.getNodeName(), listener.addedNodes.iterator().next());
+    assertEquals(node, listener.addedNodes.iterator().next());
     // verify that a znode doesn't exist (no trigger)
-    String pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node.getNodeName();
-    assertFalse("Path " + pathAdded + " was created but there are no nodeAdded triggers", zkClient().exists(pathAdded, true));
+    String pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node;
+    assertFalse("Path " + pathAdded + " was created but there are no nodeAdded triggers",
+        cluster.getDistribStateManager().hasData(pathAdded));
     listener.reset();
     // stop overseer
     log.info("====== KILL OVERSEER 1");
-    cluster.stopJettySolrRunner(overseerLeaderIndex);
-    if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) {
+    cluster.simRestartOverseer(overseerLeader);
+    if (!listener.onChangeLatch.await(10000 / SPEED, TimeUnit.MILLISECONDS)) {
       fail("onChange listener didn't execute on cluster change");
     }
     assertEquals(1, listener.lostNodes.size());
     assertEquals(overseerLeader, listener.lostNodes.iterator().next());
     assertEquals(0, listener.addedNodes.size());
     // wait until the new overseer is up
-    Thread.sleep(5000);
+    cluster.getTimeSource().sleep(5000);
     // verify that a znode does NOT exist - there's no nodeLost trigger,
     // so the new overseer cleaned up existing nodeLost markers
     String pathLost = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + overseerLeader;
-    assertFalse("Path " + pathLost + " exists", zkClient().exists(pathLost, true));
+    assertFalse("Path " + pathLost + " exists", cluster.getDistribStateManager().hasData(pathLost));
 
     listener.reset();
 
     // set up triggers
-    CloudSolrClient solrClient = cluster.getSolrClient();
 
     log.info("====== ADD TRIGGERS");
     String setTriggerCommand = "{" +
@@ -856,45 +835,37 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     response = solrClient.request(req);
     assertEquals(response.get("result").toString(), "success");
 
-    overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
-    overseerLeader = (String) overSeerStatus.get("leader");
-    overseerLeaderIndex = 0;
-    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
-      JettySolrRunner jetty = cluster.getJettySolrRunner(i);
-      if (jetty.getNodeName().equals(overseerLeader)) {
-        overseerLeaderIndex = i;
-        break;
-      }
-    }
+    overseerLeader = cluster.getSimClusterStateProvider().simGetRandomNode(random());
 
     // create another node
     log.info("====== ADD NODE 1");
-    JettySolrRunner node1 = cluster.startJettySolrRunner();
-    if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) {
+    String node1 = cluster.simAddNode();
+    if (!listener.onChangeLatch.await(10000 / SPEED, TimeUnit.MILLISECONDS)) {
       fail("onChange listener didn't execute on cluster change");
     }
     assertEquals(1, listener.addedNodes.size());
-    assertEquals(node1.getNodeName(), listener.addedNodes.iterator().next());
+    assertEquals(node1, listener.addedNodes.iterator().next());
     // verify that a znode exists
-    pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node1.getNodeName();
-    assertTrue("Path " + pathAdded + " wasn't created", zkClient().exists(pathAdded, true));
+    pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node1;
+    assertTrue("Path " + pathAdded + " wasn't created", cluster.getDistribStateManager().hasData(pathAdded));
 
-    Thread.sleep(5000);
+    cluster.getTimeSource().sleep(5000);
     // nodeAdded marker should be consumed now by nodeAdded trigger
-    assertFalse("Path " + pathAdded + " should have been deleted", zkClient().exists(pathAdded, true));
+    assertFalse("Path " + pathAdded + " should have been deleted",
+        cluster.getDistribStateManager().hasData(pathAdded));
 
     listener.reset();
     events.clear();
     triggerFiredLatch = new CountDownLatch(1);
     // kill overseer again
     log.info("====== KILL OVERSEER 2");
-    cluster.stopJettySolrRunner(overseerLeaderIndex);
-    if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) {
+    cluster.simRestartOverseer(overseerLeader);
+    if (!listener.onChangeLatch.await(10000 / SPEED, TimeUnit.MILLISECONDS)) {
       fail("onChange listener didn't execute on cluster change");
     }
 
 
-    if (!triggerFiredLatch.await(20, TimeUnit.SECONDS)) {
+    if (!triggerFiredLatch.await(20000 / SPEED, TimeUnit.MILLISECONDS)) {
       fail("Trigger should have fired by now");
     }
     assertEquals(1, events.size());
@@ -904,7 +875,6 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     assertEquals(TriggerEventType.NODELOST, ev.getEventType());
   }
 
-*/
   static Map<String, List<CapturedEvent>> listenerEvents = new ConcurrentHashMap<>();
   static CountDownLatch listenerCreated = new CountDownLatch(1);
   static boolean failDummyAction = false;


[19/21] lucene-solr:jira/solr-11285-sim: Improve this test.

Posted by ab...@apache.org.
Improve this test.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8ec50f9a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8ec50f9a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8ec50f9a

Branch: refs/heads/jira/solr-11285-sim
Commit: 8ec50f9a3ac87873b9fb9e12230e391f35ef5a13
Parents: e4fed21
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Dec 11 14:49:44 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Dec 11 14:49:44 2017 +0100

----------------------------------------------------------------------
 .../cloud/autoscaling/sim/TestLargeCluster.java  | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ec50f9a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
index 7caac42..5bd0d97 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
@@ -43,6 +43,7 @@ import org.apache.solr.cloud.autoscaling.TriggerActionBase;
 import org.apache.solr.cloud.autoscaling.TriggerEvent;
 import org.apache.solr.cloud.autoscaling.TriggerListenerBase;
 import org.apache.solr.cloud.autoscaling.CapturedEvent;
+import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.util.NamedList;
@@ -161,7 +162,7 @@ public class TestLargeCluster extends SimSolrCloudTestCase {
 
     // pick a few random nodes
     List<String> nodes = new ArrayList<>();
-    int limit = 30;
+    int limit = 75;
     for (String node : cluster.getClusterStateProvider().getLiveNodes()) {
       nodes.add(node);
       if (nodes.size() > limit) {
@@ -171,12 +172,12 @@ public class TestLargeCluster extends SimSolrCloudTestCase {
     Collections.shuffle(nodes, random());
     String collectionName = "testBasic";
     CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName,
-        "conf", 2, 5, 5, 5);
+        "conf", 5, 5, 5, 5);
     create.setMaxShardsPerNode(1);
     create.setCreateNodeSet(String.join(",", nodes));
     create.process(solrClient);
 
-    log.info("Ready after " + waitForState(collectionName, 30 * nodes.size(), TimeUnit.SECONDS, clusterShape(2, 15)) + "ms");
+    log.info("Ready after " + waitForState(collectionName, 30 * nodes.size(), TimeUnit.SECONDS, clusterShape(5, 15)) + "ms");
 
     int KILL_NODES = 8;
     // kill off a number of nodes
@@ -184,15 +185,18 @@ public class TestLargeCluster extends SimSolrCloudTestCase {
       cluster.simRemoveNode(nodes.get(i), false);
     }
 
-    log.info("Ready after " + waitForState(collectionName, 90 * KILL_NODES, TimeUnit.SECONDS, clusterShape(2, 15)) + "ms");
+    log.info("Ready after " + waitForState(collectionName, 90 * KILL_NODES, TimeUnit.SECONDS, clusterShape(5, 15)) + "ms");
 
     log.info("OP COUNTS: " + cluster.simGetOpCounts());
     long moveReplicaOps = cluster.simGetOpCount(CollectionParams.CollectionAction.MOVEREPLICA.name());
 
     // simulate a number of flaky nodes
     int FLAKY_NODES = 10;
+    int flakyReplicas = 0;
     for (int cnt = 0; cnt < 10; cnt++) {
       for (int i = KILL_NODES; i < KILL_NODES + FLAKY_NODES; i++) {
+        flakyReplicas += cluster.getSimClusterStateProvider().simGetReplicaInfos(nodes.get(i))
+            .stream().filter(r -> r.getState().equals(Replica.State.ACTIVE)).count();
         cluster.simRemoveNode(nodes.get(i), false);
       }
       cluster.getTimeSource().sleep(TimeUnit.SECONDS.toMillis(waitForSeconds) * 2);
@@ -202,10 +206,13 @@ public class TestLargeCluster extends SimSolrCloudTestCase {
       }
     }
 
-    log.info("Ready after " + waitForState(collectionName, 30 * nodes.size(), TimeUnit.SECONDS, clusterShape(2, 15)) + "ms");
+    log.info("Ready after " + waitForState(collectionName, 30 * nodes.size(), TimeUnit.SECONDS, clusterShape(5, 15)) + "ms");
     log.info("OP COUNTS: " + cluster.simGetOpCounts());
     long newMoveReplicaOps = cluster.simGetOpCount(CollectionParams.CollectionAction.MOVEREPLICA.name());
-    log.info("==== Additional MOVEREPLICA count: " + (newMoveReplicaOps - moveReplicaOps));
+    log.info("==== Flaky replicas: {}. Additional MOVEREPLICA count: {}", flakyReplicas, (newMoveReplicaOps - moveReplicaOps));
+    assertTrue("there should be new MOVERPLICA ops", newMoveReplicaOps - moveReplicaOps > 0);
+    assertTrue("there should be less than flakyReplicas=" + flakyReplicas + " MOVEREPLICA ops",
+        newMoveReplicaOps - moveReplicaOps < flakyReplicas);
   }
 
   @Test


[17/21] lucene-solr:jira/solr-11285-sim: LUCENE-8089: Add PlanetModel method that returns true if planetmodel is a sphere.

Posted by ab...@apache.org.
LUCENE-8089: Add PlanetModel method that returns true if planetmodel is a sphere.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/952f4c4e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/952f4c4e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/952f4c4e

Branch: refs/heads/jira/solr-11285-sim
Commit: 952f4c4e59fff389a143aaccf6ac724103667915
Parents: ca84ca2
Author: Karl Wright <Da...@gmail.com>
Authored: Mon Dec 11 04:21:52 2017 -0500
Committer: Karl Wright <Da...@gmail.com>
Committed: Mon Dec 11 04:21:52 2017 -0500

----------------------------------------------------------------------
 .../java/org/apache/lucene/spatial3d/geom/PlanetModel.java    | 7 +++++++
 1 file changed, 7 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/952f4c4e/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java
index 2aabfc1..55b730d 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java
@@ -122,6 +122,13 @@ public class PlanetModel implements SerializableObject {
     SerializableObject.writeDouble(outputStream, c);
   }
   
+  /** Does this planet model describe a sphere?
+   *@return true if so.
+   */
+  public boolean isSphere() {
+    return this.ab == this.c;
+  }
+  
   /** Find the minimum magnitude of all points on the ellipsoid.
    * @return the minimum magnitude for the planet.
    */


[15/21] lucene-solr:jira/solr-11285-sim: LUCENE-8088: Fix for random shape generator; committed on behalf of Ignacio Vera.

Posted by ab...@apache.org.
LUCENE-8088: Fix for random shape generator; committed on behalf of Ignacio Vera.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a948adc3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a948adc3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a948adc3

Branch: refs/heads/jira/solr-11285-sim
Commit: a948adc390ef3ba977aeefc917a55974660d6152
Parents: dcb8470
Author: Karl Wright <Da...@gmail.com>
Authored: Sun Dec 10 06:37:10 2017 -0500
Committer: Karl Wright <Da...@gmail.com>
Committed: Sun Dec 10 06:37:10 2017 -0500

----------------------------------------------------------------------
 .../apache/lucene/spatial3d/geom/RandomGeo3dShapeGenerator.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a948adc3/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeo3dShapeGenerator.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeo3dShapeGenerator.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeo3dShapeGenerator.java
index 4ff973a..cf3713c 100644
--- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeo3dShapeGenerator.java
+++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeo3dShapeGenerator.java
@@ -755,7 +755,7 @@ public class RandomGeo3dShapeGenerator extends LuceneTestCase {
         int vertexCount = random().nextInt(14) + 3;
         List<GeoPoint> geoPoints = points(vertexCount,planetModel, constraints);
         if (geoPoints.size() < 3){
-          continue;
+          break;
         }
         orderPoints(geoPoints);
         polDescription.add(new GeoPolygonFactory.PolygonDescription(geoPoints));


[02/21] lucene-solr:jira/solr-11285-sim: Fix TestTopFieldCollectorEarlyTermination to ensure it has at least 1 non-deleted document in the index

Posted by ab...@apache.org.
Fix TestTopFieldCollectorEarlyTermination to ensure it has at least 1 non-deleted document in the index


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a3141457
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a3141457
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a3141457

Branch: refs/heads/jira/solr-11285-sim
Commit: a3141457d6ea1fe4ee5cdcecc94b66ce57d66931
Parents: 68d16c2
Author: Simon Willnauer <si...@apache.org>
Authored: Thu Dec 7 15:47:28 2017 +0100
Committer: Simon Willnauer <si...@apache.org>
Committed: Thu Dec 7 15:47:28 2017 +0100

----------------------------------------------------------------------
 .../lucene/search/TestTopFieldCollectorEarlyTermination.java    | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a3141457/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollectorEarlyTermination.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollectorEarlyTermination.java b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollectorEarlyTermination.java
index 4b891de..df02cb3 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollectorEarlyTermination.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollectorEarlyTermination.java
@@ -96,7 +96,7 @@ public class TestTopFieldCollectorEarlyTermination extends LuceneTestCase {
       iw.forceMerge(FORCE_MERGE_MAX_SEGMENT_COUNT);
     }
     reader = iw.getReader();
-    if (reader.maxDoc() == 0) {
+    if (reader.numDocs() == 0) {
       iw.addDocument(new Document());
       reader.close();
       reader = iw.getReader();
@@ -130,7 +130,7 @@ public class TestTopFieldCollectorEarlyTermination extends LuceneTestCase {
         final int numHits = TestUtil.nextInt(random(), 1, numDocs);
         FieldDoc after;
         if (paging) {
-          assert searcher.getIndexReader().maxDoc() > 0;
+          assert searcher.getIndexReader().numDocs() > 0;
           TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
           after = (FieldDoc) td.scoreDocs[td.scoreDocs.length - 1];
         } else {
@@ -209,5 +209,4 @@ public class TestTopFieldCollectorEarlyTermination extends LuceneTestCase {
       assertEquals(scoreDoc1.score, scoreDoc2.score, 0f);
     }
   }
-
 }


[04/21] lucene-solr:jira/solr-11285-sim: SOLR-11691: Bug: V2 requests for create-alias didn't work when the collections param was an array.

Posted by ab...@apache.org.
SOLR-11691: Bug: V2 requests for create-alias didn't work when the collections param was an array.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5448274f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5448274f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5448274f

Branch: refs/heads/jira/solr-11285-sim
Commit: 5448274f26191a9882aa5c3020e3cbdcbf93551c
Parents: ede46fe
Author: David Smiley <ds...@apache.org>
Authored: Thu Dec 7 10:55:50 2017 -0500
Committer: David Smiley <ds...@apache.org>
Committed: Thu Dec 7 10:55:50 2017 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 ++
 .../org/apache/solr/cloud/CreateAliasCmd.java   | 36 ++++++++++++++------
 .../apache/solr/cloud/AliasIntegrationTest.java | 10 ++++--
 3 files changed, 35 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5448274f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 80d8557..148b069 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -228,6 +228,8 @@ Bug Fixes
 * SOLR-11664: JSON Facet API: range facets containing unique, hll, min, max aggregations over string fields
   produced incorrect results since 7.0 (Volodymyr Rudniev, yonik)
 
+* SOLR-11691: V2 requests for create-alias didn't work when the collections param was an array.
+  (Jason Gerlowski, Gus Heck, David Smiley, noble)
 
 Optimizations
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5448274f/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java
index 62b65f3..e10d53e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java
@@ -21,6 +21,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
 import org.apache.solr.common.SolrException;
@@ -43,13 +44,14 @@ public class CreateAliasCmd implements Cmd {
   @Override
   public void call(ClusterState state, ZkNodeProps message, NamedList results)
       throws Exception {
-    String aliasName = message.getStr(NAME);
-    String collections = message.getStr("collections"); // could be comma delimited list
+    final String aliasName = message.getStr(NAME);
+    final List<String> canonicalCollectionList = parseCollectionsParameter(message.get("collections"));
+    final String canonicalCollectionsString = StrUtils.join(canonicalCollectionList, ',');
 
     ZkStateReader zkStateReader = ocmh.zkStateReader;
-    validateAllCollectionsExistAndNoDups(collections, zkStateReader);
+    validateAllCollectionsExistAndNoDups(canonicalCollectionList, zkStateReader);
 
-    zkStateReader.aliasesHolder.applyModificationAndExportToZk(aliases -> aliases.cloneWithCollectionAlias(aliasName, collections));
+    zkStateReader.aliasesHolder.applyModificationAndExportToZk(aliases -> aliases.cloneWithCollectionAlias(aliasName, canonicalCollectionsString));
 
     // Sleep a bit to allow ZooKeeper state propagation.
     //
@@ -66,20 +68,34 @@ public class CreateAliasCmd implements Cmd {
     Thread.sleep(100);
   }
 
-  private void validateAllCollectionsExistAndNoDups(String collections, ZkStateReader zkStateReader) {
-    List<String> collectionArr = StrUtils.splitSmart(collections, ",", true);
-    if (new HashSet<>(collectionArr).size() != collectionArr.size()) {
+  private void validateAllCollectionsExistAndNoDups(List<String> collectionList, ZkStateReader zkStateReader) {
+    final String collectionStr = StrUtils.join(collectionList, ',');
+
+    if (new HashSet<>(collectionList).size() != collectionList.size()) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-          String.format(Locale.ROOT,  "Can't create collection alias for collections='%s', since it contains duplicates", collections));
+          String.format(Locale.ROOT,  "Can't create collection alias for collections='%s', since it contains duplicates", collectionStr));
     }
     ClusterState clusterState = zkStateReader.getClusterState();
     Set<String> aliasNames = zkStateReader.getAliases().getCollectionAliasListMap().keySet();
-    for (String collection : collectionArr) {
+    for (String collection : collectionList) {
       if (clusterState.getCollectionOrNull(collection) == null && !aliasNames.contains(collection)) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            String.format(Locale.ROOT,  "Can't create collection alias for collections='%s', '%s' is not an existing collection or alias", collections, collection));
+            String.format(Locale.ROOT,  "Can't create collection alias for collections='%s', '%s' is not an existing collection or alias", collectionStr, collection));
       }
     }
   }
+  
+  /**
+   * The v2 API directs that the 'collections' parameter be provided as a JSON array (e.g. ["a", "b"]).  We also
+   * maintain support for the legacy format, a comma-separated list (e.g. a,b).
+   */
+  @SuppressWarnings("unchecked")
+  private List<String> parseCollectionsParameter(Object colls) {
+    if (colls == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing collections param");
+    if (colls instanceof List) return (List<String>) colls;
+    return StrUtils.splitSmart(colls.toString(), ",", true).stream()
+        .map(String::trim)
+        .collect(Collectors.toList());
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5448274f/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
index 25a850d..d0f0e80 100644
--- a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
@@ -23,12 +23,14 @@ import java.util.function.Consumer;
 import java.util.function.UnaryOperator;
 
 import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.request.V2Request;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.Aliases;
@@ -177,7 +179,6 @@ public class AliasIntegrationTest extends SolrCloudTestCase {
   
   @Test
   public void test() throws Exception {
-
     CollectionAdminRequest.createCollection("collection1", "conf", 2, 1).process(cluster.getSolrClient());
     CollectionAdminRequest.createCollection("collection2", "conf", 1, 1).process(cluster.getSolrClient());
     waitForState("Expected collection1 to be created with 2 shards and 1 replica", "collection1", clusterShape(2, 1));
@@ -240,7 +241,11 @@ public class AliasIntegrationTest extends SolrCloudTestCase {
     //searchSeveralWays("testalias4,testalias5", new SolrQuery("*:*"), 3);
 
     ///////////////
-    CollectionAdminRequest.createAlias("testalias6", "collection2,collection1").process(cluster.getSolrClient());
+    // use v2 API
+    new V2Request.Builder("/collections")
+        .withMethod(SolrRequest.METHOD.POST)
+        .withPayload("{\"create-alias\": {\"name\": \"testalias6\", collections:[\"collection2\",\"collection1\"]}}")
+        .build().process(cluster.getSolrClient());
 
     searchSeveralWays("testalias6", new SolrQuery("*:*"), 6);
 
@@ -328,7 +333,6 @@ public class AliasIntegrationTest extends SolrCloudTestCase {
   }
 
   public void testErrorChecks() throws Exception {
-
     CollectionAdminRequest.createCollection("testErrorChecks-collection", "conf", 2, 1).process(cluster.getSolrClient());
     waitForState("Expected testErrorChecks-collection to be created with 2 shards and 1 replica", "testErrorChecks-collection", clusterShape(2, 1));
     


[11/21] lucene-solr:jira/solr-11285-sim: LUCENE-8080: Detect when we are constructing a bogus exact circle, and throw an IllegalArgumentException in that case. Committed on behalf of Ignacio Vera.

Posted by ab...@apache.org.
LUCENE-8080: Detect when we are constructing a bogus exact circle, and throw an IllegalArgumentException in that case.  Committed on behalf of Ignacio Vera.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/18f12fdb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/18f12fdb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/18f12fdb

Branch: refs/heads/jira/solr-11285-sim
Commit: 18f12fdb2853814bea5f61e5b020b80d6bcebec6
Parents: cd30dab
Author: Karl Wright <Da...@gmail.com>
Authored: Fri Dec 8 05:41:11 2017 -0500
Committer: Karl Wright <Da...@gmail.com>
Committed: Fri Dec 8 05:41:11 2017 -0500

----------------------------------------------------------------------
 .../lucene/spatial3d/geom/GeoExactCircle.java   | 15 +++++----
 .../spatial3d/geom/GeoExactCircleTest.java      | 34 +++++++++++++++-----
 2 files changed, 34 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18f12fdb/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoExactCircle.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoExactCircle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoExactCircle.java
index 5726e60..876b24f 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoExactCircle.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoExactCircle.java
@@ -77,17 +77,16 @@ class GeoExactCircle extends GeoBaseCircle {
     final GeoPoint eastPoint = planetModel.surfacePointOnBearing(center, cutoffAngle, Math.PI * 0.5);
     final GeoPoint westPoint = planetModel.surfacePointOnBearing(center, cutoffAngle, Math.PI * 1.5);
 
-    final boolean mustSplit = cutoffAngle > Math.PI * 0.5;
     final GeoPoint edgePoint;
     if (planetModel.c > planetModel.ab) {
       // z can be greater than x or y, so ellipse is longer in height than width
-      slices.add(new ApproximationSlice(center, eastPoint, Math.PI * 0.5, westPoint, Math.PI * -0.5, northPoint, 0.0, mustSplit));
-      slices.add(new ApproximationSlice(center, westPoint, Math.PI * 1.5, eastPoint, Math.PI * 0.5, southPoint, Math.PI, mustSplit));
+      slices.add(new ApproximationSlice(center, eastPoint, Math.PI * 0.5, westPoint, Math.PI * -0.5, northPoint, 0.0, true));
+      slices.add(new ApproximationSlice(center, westPoint, Math.PI * 1.5, eastPoint, Math.PI * 0.5, southPoint, Math.PI, true));
       edgePoint = eastPoint;
     } else {
       // z will be less than x or y, so ellipse is shorter than it is tall
-      slices.add(new ApproximationSlice(center, northPoint, 0.0, southPoint, Math.PI, eastPoint, Math.PI * 0.5, mustSplit));
-      slices.add(new ApproximationSlice(center, southPoint, Math.PI, northPoint, Math.PI * 2.0, westPoint, Math.PI * 1.5, mustSplit));
+      slices.add(new ApproximationSlice(center, northPoint, 0.0, southPoint, Math.PI, eastPoint, Math.PI * 0.5, true));
+      slices.add(new ApproximationSlice(center, southPoint, Math.PI, northPoint, Math.PI * 2.0, westPoint, Math.PI * 1.5, true));
       edgePoint = northPoint;
     }
     //System.out.println("Edgepoint = " + edgePoint);
@@ -283,8 +282,10 @@ class GeoExactCircle extends GeoBaseCircle {
       if (this.plane == null) {
         throw new IllegalArgumentException("Either circle is too small or accuracy is too high; could not construct a plane with endPoint1="+endPoint1+" bearing "+point1Bearing+", endPoint2="+endPoint2+" bearing "+point2Bearing+", middle="+middlePoint+" bearing "+middlePointBearing);
       }
-      if (plane.isWithin(center) == false || !plane.evaluateIsZero(endPoint1) || !plane.evaluateIsZero(endPoint2) || !plane.evaluateIsZero(middlePoint))
-        throw new IllegalStateException("SidedPlane constructor built a bad plane!!");
+      if (this.plane.isWithin(-center.x, -center.y, -center.z)) {
+        //Plane is bogus, we cannot build the circle
+        throw new IllegalArgumentException("Could not construct a valid plane for this planet model with endPoint1="+endPoint1+" bearing "+point1Bearing+", endPoint2="+endPoint2+" bearing "+point2Bearing+", middle="+middlePoint+" bearing "+middlePointBearing);
+      }
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18f12fdb/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoExactCircleTest.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoExactCircleTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoExactCircleTest.java
index 2f758f1..6f96214 100644
--- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoExactCircleTest.java
+++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoExactCircleTest.java
@@ -54,8 +54,6 @@ public class GeoExactCircleTest extends RandomGeo3dShapeGenerator{
 
   @Test
   public void testSurfacePointOnBearingScale(){
-    double ab = 1.6;
-    double c = 0.7;
     PlanetModel p1 = PlanetModel.WGS84;
     PlanetModel p2 = new PlanetModel(0.5 * PlanetModel.WGS84.ab, 0.5 * PlanetModel.WGS84.c );
     GeoPoint point1P1 = new GeoPoint(p1, 0, 0);
@@ -93,9 +91,18 @@ public class GeoExactCircleTest extends RandomGeo3dShapeGenerator{
   @Test
   @Repeat(iterations = 100)
   public void RandomPointBearingCardinalTest(){
-    double ab = random().nextDouble() * 0.6 + 0.9;
-    double c = random().nextDouble() * 0.6  + 0.9 ;
-    PlanetModel planetModel = new PlanetModel(ab, c);
+    //surface distance calculations methods start not converging when
+    //planet flattening > 0.4
+    PlanetModel planetModel;
+    do {
+      double ab = random().nextDouble() * 2;
+      double c = random().nextDouble() * 2;
+      if (random().nextBoolean()) {
+        planetModel = new PlanetModel(ab, c);
+      } else {
+        planetModel = new PlanetModel(c, ab);
+      }
+    } while (Math.abs(planetModel.flattening) > 0.4);
     GeoPoint center = randomGeoPoint(planetModel);
     double radius =  random().nextDouble() * 0.9 * Math.PI;
     checkBearingPoint(planetModel, center, radius, 0);
@@ -133,7 +140,7 @@ public class GeoExactCircleTest extends RandomGeo3dShapeGenerator{
   public void exactCircleLargeTest(){
     boolean success = true;
     try {
-      GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(new PlanetModel(0.5, 0.7), 0.25 * Math.PI,  0,0.35 * Math.PI, 1e-12);
+      GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(new PlanetModel(0.99, 1.05), 0.25 * Math.PI,  0,0.35 * Math.PI, 1e-12);
     } catch (IllegalArgumentException e) {
       success = false;
     }
@@ -174,8 +181,8 @@ public class GeoExactCircleTest extends RandomGeo3dShapeGenerator{
    */
   @Test
   @Repeat(iterations = 100)
-  public void testRandom_LUCENE8054() {
-    PlanetModel planetModel = PlanetModel.WGS84;
+  public void testRandomLUCENE8054() {
+    PlanetModel planetModel = randomPlanetModel();
     GeoCircle circle1 = (GeoCircle) randomGeoAreaShape(EXACT_CIRCLE, planetModel);
     // new radius, a bit smaller than the generated one!
     double radius = circle1.getRadius() *  (1 - 0.01 * random().nextDouble());
@@ -232,4 +239,15 @@ public class GeoExactCircleTest extends RandomGeo3dShapeGenerator{
     assertTrue(circle1.getRelationship(circle2) != GeoArea.DISJOINT);
   }
 
+  public void testLUCENE8080() {
+    PlanetModel planetModel = new PlanetModel(1.6304230055804751, 1.0199671157571204);
+    boolean fail = false;
+    try {
+      GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(planetModel, 0.8853814403571284, 0.9784990176851283, 0.9071033527030907, 1e-11);
+    } catch (IllegalArgumentException e) {
+      fail = true;
+    }
+    assertTrue(fail);
+  }
+
 }


[05/21] lucene-solr:jira/solr-11285-sim: Remove 7.1.1 section of the changelog.

Posted by ab...@apache.org.
Remove 7.1.1 section of the changelog.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/14713302
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/14713302
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/14713302

Branch: refs/heads/jira/solr-11285-sim
Commit: 14713302c8b9e9335a03edd866def0665b5d82ef
Parents: 5448274
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Dec 7 20:01:29 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Dec 7 20:01:29 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14713302/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 21da195..e629be0 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -168,27 +168,24 @@ Optimizations
   caching as they could break memory accounting of the query cache.
   (Adrien Grand)
 
-Tests
-
-* LUCENE-8035: Run tests with JDK-specific options: --illegal-access=deny
-  on Java 9+.  (Uwe Schindler)
-
-======================= Lucene 7.1.1 =======================
-
-Bug Fixes
-
-* LUCENE-8055: MemoryIndex.MemoryDocValuesIterator returns 2 documents 
+* LUCENE-8055: MemoryIndex.MemoryDocValuesIterator returns 2 documents
   instead of 1. (Simon Willnauer)
 
 * LUCENE-8043: Fix document accounting in IndexWriter to prevent writing too many
   documents. Once this happens, Lucene refuses to open the index and throws a
-  CorruptIndexException. (Simon Willnauer, Yonik Seeley, Mike McCandless) 
+  CorruptIndexException. (Simon Willnauer, Yonik Seeley, Mike McCandless)
+
+Tests
+
+* LUCENE-8035: Run tests with JDK-specific options: --illegal-access=deny
+  on Java 9+.  (Uwe Schindler)
 
 Build
 
 * LUCENE-6144: Upgrade Ivy to 2.4.0; 'ant ivy-bootstrap' now removes old Ivy
   jars in ~/.ant/lib/.  (Shawn Heisey, Steve Rowe)
 
+
 ======================= Lucene 7.1.0 =======================
 
 Changes in Runtime Behavior


[14/21] lucene-solr:jira/solr-11285-sim: SOLR-11423: fix solr/CHANGES.txt, fixed in 7.2 not 7.1

Posted by ab...@apache.org.
SOLR-11423: fix solr/CHANGES.txt, fixed in 7.2 not 7.1


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3a7f1071
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3a7f1071
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3a7f1071

Branch: refs/heads/jira/solr-11285-sim
Commit: 3a7f1071644ffe11ee74c96cfd4946204b6544b5
Parents: cb14da3
Author: Scott Blum <dr...@apache.org>
Authored: Fri Dec 8 14:25:44 2017 -0500
Committer: Scott Blum <dr...@apache.org>
Committed: Fri Dec 8 14:25:44 2017 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a7f1071/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 148b069..1218171 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -160,6 +160,8 @@ New Features
 Bug Fixes
 ----------------------
 
+* SOLR-11423: Overseer queue needs a hard cap (maximum size) that clients respect (Scott Blum, Joshua Humphries, Noble Paul)
+
 * SOLR-11445: Overseer should not hang when process bad message. (Cao Manh Dat, shalin) 
 
 * SOLR-11447: ZkStateWriter should process commands in atomic. (Cao Manh Dat, shalin)
@@ -443,8 +445,6 @@ New Features
 Bug Fixes
 ----------------------
 
-* SOLR-11423: Overseer queue needs a hard cap (maximum size) that clients respect (Scott Blum, Joshua Humphries, Noble Paul)
-
 * SOLR-10602: Triggers should be able to restore state from old instances when taking over. (shalin)
 
 * SOLR-10714: OverseerTriggerThread does not start triggers on overseer start until autoscaling


[03/21] lucene-solr:jira/solr-11285-sim: LUCENE-8081: Allow IndexWriter to opt out of flushing on indexing threads

Posted by ab...@apache.org.
LUCENE-8081: Allow IndexWriter to opt out of flushing on indexing threads

Index/Update Threads try to help out flushing pending document buffers to
disk. This change adds an expert setting to opt out of this behavior unless
flushing is falling behind.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ede46fe6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ede46fe6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ede46fe6

Branch: refs/heads/jira/solr-11285-sim
Commit: ede46fe6e972811ca49635d07106f177a7d90d30
Parents: a314145
Author: Simon Willnauer <si...@apache.org>
Authored: Wed Dec 6 18:20:48 2017 +0100
Committer: Simon Willnauer <si...@apache.org>
Committed: Thu Dec 7 16:22:52 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  7 ++
 .../apache/lucene/index/DocumentsWriter.java    |  5 +-
 .../apache/lucene/index/IndexWriterConfig.java  |  5 ++
 .../lucene/index/LiveIndexWriterConfig.java     | 27 ++++++
 .../apache/lucene/index/TestIndexWriter.java    | 87 ++++++++++++++++++++
 .../lucene/index/TestIndexWriterConfig.java     |  2 +
 .../org/apache/lucene/util/LuceneTestCase.java  |  4 +
 7 files changed, 135 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index bb84357..21da195 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -59,6 +59,13 @@ API Changes
 * LUCENE-8051: LevensteinDistance renamed to LevenshteinDistance.
   (Pulak Ghosh via Adrien Grand)
 
+Improvements
+
+* LUCENE-8081: Allow IndexWriter to opt out of flushing on indexing threads
+  Index/Update Threads try to help out flushing pending document buffers to
+  disk. This change adds an expert setting to opt ouf of this behavior unless
+  flusing is falling behind. (Simon Willnauer)
+
 ======================= Lucene 7.2.0 =======================
 
 API Changes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
index d4e4e23..7ad4feb 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
@@ -392,7 +392,8 @@ final class DocumentsWriter implements Closeable, Accountable {
   private boolean preUpdate() throws IOException, AbortingException {
     ensureOpen();
     boolean hasEvents = false;
-    if (flushControl.anyStalledThreads() || flushControl.numQueuedFlushes() > 0) {
+
+    if (flushControl.anyStalledThreads() || (flushControl.numQueuedFlushes() > 0 && config.checkPendingFlushOnUpdate)) {
       // Help out flushing any queued DWPTs so we can un-stall:
       do {
         // Try pick up pending threads here if possible
@@ -412,7 +413,7 @@ final class DocumentsWriter implements Closeable, Accountable {
     hasEvents |= applyAllDeletes(deleteQueue);
     if (flushingDWPT != null) {
       hasEvents |= doFlush(flushingDWPT);
-    } else {
+    } else if (config.checkPendingFlushOnUpdate) {
       final DocumentsWriterPerThread nextPendingFlush = flushControl.nextPendingFlush();
       if (nextPendingFlush != null) {
         hasEvents |= doFlush(nextPendingFlush);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
index 6e96322..997a686 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
@@ -479,5 +479,10 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
     sb.append("writer=").append(writer.get()).append("\n");
     return sb.toString();
   }
+
+  @Override
+  public IndexWriterConfig setCheckPendingFlushUpdate(boolean checkPendingFlushOnUpdate) {
+    return (IndexWriterConfig) super.setCheckPendingFlushUpdate(checkPendingFlushOnUpdate);
+  }
   
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
index b67d26b..1be6a73 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
@@ -103,6 +103,9 @@ public class LiveIndexWriterConfig {
   /** The field names involved in the index sort */
   protected Set<String> indexSortFields = Collections.emptySet();
 
+  /** if an indexing thread should check for pending flushes on update in order to help out on a full flush*/
+  protected volatile boolean checkPendingFlushOnUpdate = true;
+
   // used by IndexWriterConfig
   LiveIndexWriterConfig(Analyzer analyzer) {
     this.analyzer = analyzer;
@@ -426,6 +429,29 @@ public class LiveIndexWriterConfig {
     return indexSortFields;
   }
 
+  /**
+   * Expert: Returns if indexing threads check for pending flushes on update in order
+   * to help our flushing indexing buffers to disk
+   * @lucene.eperimental
+   */
+  public boolean isCheckPendingFlushOnUpdate() {
+    return checkPendingFlushOnUpdate;
+  }
+
+  /**
+   * Expert: sets if indexing threads check for pending flushes on update in order
+   * to help our flushing indexing buffers to disk. As a consequence, threads calling
+   * {@link DirectoryReader#openIfChanged(DirectoryReader, IndexWriter)} or {@link IndexWriter#flush()} will
+   * be the only thread writing segments to disk unless flushes are falling behind. If indexing is stalled
+   * due to too many pending flushes indexing threads will help our writing pending segment flushes to disk.
+   *
+   * @lucene.eperimental
+   */
+  public LiveIndexWriterConfig setCheckPendingFlushUpdate(boolean checkPendingFlushOnUpdate) {
+    this.checkPendingFlushOnUpdate = checkPendingFlushOnUpdate;
+    return this;
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
@@ -448,6 +474,7 @@ public class LiveIndexWriterConfig {
     sb.append("useCompoundFile=").append(getUseCompoundFile()).append("\n");
     sb.append("commitOnClose=").append(getCommitOnClose()).append("\n");
     sb.append("indexSort=").append(getIndexSort()).append("\n");
+    sb.append("checkPendingFlushOnUpdate=").append(isCheckPendingFlushOnUpdate()).append("\n");
     return sb.toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 04460cd..76a8172 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -29,6 +29,7 @@ import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -37,6 +38,7 @@ import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CannedTokenStream;
@@ -2877,4 +2879,89 @@ public class TestIndexWriter extends LuceneTestCase {
     dir.close();
   }
 
+  public void testCheckPendingFlushPostUpdate() throws IOException, InterruptedException {
+    MockDirectoryWrapper dir = newMockDirectory();
+    Set<String> flushingThreads = Collections.synchronizedSet(new HashSet<>());
+    dir.failOn(new MockDirectoryWrapper.Failure() {
+      @Override
+      public void eval(MockDirectoryWrapper dir) throws IOException {
+        StackTraceElement[] trace = new Exception().getStackTrace();
+        for (int i = 0; i < trace.length; i++) {
+          if ("flush".equals(trace[i].getMethodName())
+              && "org.apache.lucene.index.DocumentsWriterPerThread".equals(trace[i].getClassName())) {
+            flushingThreads.add(Thread.currentThread().getName());
+            break;
+          }
+        }
+      }
+    });
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig()
+        .setCheckPendingFlushUpdate(false)
+        .setMaxBufferedDocs(Integer.MAX_VALUE)
+        .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH));
+    AtomicBoolean done = new AtomicBoolean(false);
+    int numThreads = 1 + random().nextInt(3);
+    CountDownLatch latch = new CountDownLatch(numThreads);
+    Set<String> indexingThreads = new HashSet<>();
+    Thread[] threads = new Thread[numThreads];
+    for (int i = 0; i < numThreads; i++) {
+      threads[i] = new Thread(() -> {
+        latch.countDown();
+        int numDocs = 0;
+        while (done.get() == false) {
+
+          Document doc = new Document();
+          doc.add(new StringField("id", "foo", Field.Store.YES));
+          try {
+            w.addDocument(doc);
+          } catch (Exception e) {
+            throw new AssertionError(e);
+          }
+          if (numDocs++ % 10 == 0) {
+            Thread.yield();
+          }
+        }
+      });
+      indexingThreads.add(threads[i].getName());
+      threads[i].start();
+    }
+    latch.await();
+    try {
+      int numIters = rarely() ? 1 + random().nextInt(5) : 1;
+      for (int i = 0; i < numIters; i++) {
+        waitForDocs(w);
+        w.commit();
+        assertTrue(flushingThreads.toString(), flushingThreads.contains(Thread.currentThread().getName()));
+        flushingThreads.retainAll(indexingThreads);
+        assertTrue(flushingThreads.toString(), flushingThreads.isEmpty());
+      }
+      w.getConfig().setCheckPendingFlushUpdate(true);
+      numIters = 0;
+      while (true) {
+        assertFalse("should finish in less than 100 iterations", numIters++ >= 100);
+        waitForDocs(w);
+        w.flush();
+        flushingThreads.retainAll(indexingThreads);
+        if (flushingThreads.isEmpty() == false) {
+          break;
+        }
+      }
+    } finally {
+      done.set(true);
+      for (int i = 0; i < numThreads; i++) {
+        threads[i].join();
+      }
+      IOUtils.close(w, dir);
+    }
+  }
+
+  private static void waitForDocs(IndexWriter w) {
+    int numDocsInRam = w.numRamDocs();
+    while(true) {
+      if (numDocsInRam != w.numRamDocs()) {
+        return;
+      }
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
index 464966a..063045e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
@@ -74,6 +74,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
     assertEquals(Codec.getDefault(), conf.getCodec());
     assertEquals(InfoStream.getDefault(), conf.getInfoStream());
     assertEquals(IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM, conf.getUseCompoundFile());
+    assertTrue(conf.isCheckPendingFlushOnUpdate());
     // Sanity check - validate that all getters are covered.
     Set<String> getters = new HashSet<>();
     getters.add("getAnalyzer");
@@ -98,6 +99,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
     getters.add("getCodec");
     getters.add("getInfoStream");
     getters.add("getUseCompoundFile");
+    getters.add("isCheckPendingFlushOnUpdate");
     
     for (Method m : IndexWriterConfig.class.getDeclaredMethods()) {
       if (m.getDeclaringClass() == IndexWriterConfig.class && m.getName().startsWith("get")) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ede46fe6/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index d7b913c..ac48aa6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -141,6 +141,7 @@ import com.carrotsearch.randomizedtesting.rules.StaticFieldsInvariantRule;
 
 import junit.framework.AssertionFailedError;
 
+import static com.carrotsearch.randomizedtesting.RandomizedTest.frequently;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsInt;
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
@@ -988,6 +989,9 @@ public abstract class LuceneTestCase extends Assert {
     }
     c.setUseCompoundFile(r.nextBoolean());
     c.setReaderPooling(r.nextBoolean());
+    if (rarely(r)) {
+      c.setCheckPendingFlushUpdate(false);
+    }
     return c;
   }
 


[06/21] lucene-solr:jira/solr-11285-sim: SOLR-11359: added documentation

Posted by ab...@apache.org.
SOLR-11359: added documentation


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/24a0708d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/24a0708d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/24a0708d

Branch: refs/heads/jira/solr-11285-sim
Commit: 24a0708d3c65138ecdee77edd7ce7e08e7e19c75
Parents: 1471330
Author: Noble Paul <no...@apache.org>
Authored: Fri Dec 8 17:36:41 2017 +1100
Committer: Noble Paul <no...@apache.org>
Committed: Fri Dec 8 17:36:41 2017 +1100

----------------------------------------------------------------------
 .../src/solrcloud-autoscaling-api.adoc          | 51 ++++++++++++++++++++
 1 file changed, 51 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/24a0708d/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc
index 5a41196..9d53552 100644
--- a/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc
+++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc
@@ -142,6 +142,57 @@ However, since the first node in the first example had more than 1 replica for a
 
 In the above example the node with port 8983 has two replicas for `shard1` in violation of our policy.
 
+== Suggestions API ==
+Suggestions are operations recommended by the system according to the policies and preferences the user has set. Note that the suggestions are made only if there are `violations` to the policies and the collection admin operation would use the preferences to identify the target node.
+
+The API is available at `/admin/autoscaling/suggestion`
+[source,json]
+----
+{
+  "responseHeader":{
+    "status":0,
+    "QTime":101},
+  "suggestions":[{
+      "type":"violation",
+      "violation":{
+        "collection":"mycoll",
+        "shard":"shard2",
+        "tagKey":"7574",
+        "violation":{ "delta":-1},
+        "clause":{
+          "replica":"0",
+          "shard":"#EACH",
+          "port":7574,
+          "collection":"mycoll"}},
+      "operation":{
+        "method":"POST",
+        "path":"/c/mycoll",
+        "command":{"move-replica":{
+            "targetNode":"192.168.43.37:8983_solr",
+            "replica":"core_node7"}}}},
+    {
+      "type":"violation",
+      "violation":{
+        "collection":"mycoll",
+        "shard":"shard2",
+        "tagKey":"7574",
+        "violation":{ "delta":-1},
+        "clause":{
+          "replica":"0",
+          "shard":"#EACH",
+          "port":7574,
+          "collection":"mycoll"}},
+      "operation":{
+        "method":"POST",
+        "path":"/c/mycoll",
+        "command":{"move-replica":{
+            "targetNode":"192.168.43.37:7575_solr",
+            "replica":"core_node15"}}}}],
+  "WARNING":"This response format is experimental.  It is likely to change in the future."}
+----
+
+The operation is an actual API call that can be invoked to remedy the current violation
+
 == History API
 
 The history of autoscaling events is available at `/admin/autoscaling/history`. It returns information


[09/21] lucene-solr:jira/solr-11285-sim: LUCENE-8081: Fix javadoc tag.

Posted by ab...@apache.org.
LUCENE-8081: Fix javadoc tag.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b3273942
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b3273942
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b3273942

Branch: refs/heads/jira/solr-11285-sim
Commit: b32739428be0a357a61b7506ca36af3c85b6f236
Parents: 0e1d668
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Dec 8 08:52:19 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Dec 8 08:52:19 2017 +0100

----------------------------------------------------------------------
 .../src/java/org/apache/lucene/index/LiveIndexWriterConfig.java  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b3273942/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
index 1be6a73..af8ff15 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
@@ -432,7 +432,7 @@ public class LiveIndexWriterConfig {
   /**
    * Expert: Returns if indexing threads check for pending flushes on update in order
    * to help our flushing indexing buffers to disk
-   * @lucene.eperimental
+   * @lucene.experimental
    */
   public boolean isCheckPendingFlushOnUpdate() {
     return checkPendingFlushOnUpdate;
@@ -445,7 +445,7 @@ public class LiveIndexWriterConfig {
    * be the only thread writing segments to disk unless flushes are falling behind. If indexing is stalled
    * due to too many pending flushes indexing threads will help our writing pending segment flushes to disk.
    *
-   * @lucene.eperimental
+   * @lucene.experimental
    */
   public LiveIndexWriterConfig setCheckPendingFlushUpdate(boolean checkPendingFlushOnUpdate) {
     this.checkPendingFlushOnUpdate = checkPendingFlushOnUpdate;


[16/21] lucene-solr:jira/solr-11285-sim: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr

Posted by ab...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ca84ca2f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ca84ca2f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ca84ca2f

Branch: refs/heads/jira/solr-11285-sim
Commit: ca84ca2f79b548a1d468981cf3ebb325d1df4ab5
Parents: a948adc 3a7f107
Author: Karl Wright <Da...@gmail.com>
Authored: Sun Dec 10 06:37:43 2017 -0500
Committer: Karl Wright <Da...@gmail.com>
Committed: Sun Dec 10 06:37:43 2017 -0500

----------------------------------------------------------------------
 .../apache/lucene/index/TestIndexWriter.java    | 27 +++++++++++++++-----
 solr/CHANGES.txt                                |  4 +--
 2 files changed, 23 insertions(+), 8 deletions(-)
----------------------------------------------------------------------



[08/21] lucene-solr:jira/solr-11285-sim: LUCENE-4100: Fix more queries to implement the new updated createWeight API.

Posted by ab...@apache.org.
LUCENE-4100: Fix more queries to implement the new updated createWeight API.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0e1d6682
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0e1d6682
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0e1d6682

Branch: refs/heads/jira/solr-11285-sim
Commit: 0e1d6682d6ca66590e279ee0c4ccce745f2accd6
Parents: d5c72eb
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Dec 8 08:50:00 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Dec 8 08:50:00 2017 +0100

----------------------------------------------------------------------
 .../core/src/java/org/apache/solr/search/ExportQParserPlugin.java | 3 ++-
 solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java  | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0e1d6682/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
index 6722fee..fd625af 100644
--- a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
@@ -71,7 +71,8 @@ public class ExportQParserPlugin extends QParserPlugin {
       return null;
     }
 
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException{
+    @Override
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException{
       return mainQuery.createWeight(searcher, ScoreMode.COMPLETE, boost);
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0e1d6682/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
index 7d794c6..8832bb7 100644
--- a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
@@ -113,7 +113,8 @@ public class HashQParserPlugin extends QParserPlugin {
       this.worker = worker;
     }
 
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    @Override
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
 
       String[] keys = keysParam.split(",");
       SolrIndexSearcher solrIndexSearcher = (SolrIndexSearcher)searcher;


[07/21] lucene-solr:jira/solr-11285-sim: LUCENE-8081: Remove unused import.

Posted by ab...@apache.org.
LUCENE-8081: Remove unused import.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d5c72eb5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d5c72eb5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d5c72eb5

Branch: refs/heads/jira/solr-11285-sim
Commit: d5c72eb5887fe3d399908c4accf453b7a7b339ab
Parents: 24a0708
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Dec 8 08:45:18 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Dec 8 08:45:18 2017 +0100

----------------------------------------------------------------------
 .../src/java/org/apache/lucene/util/LuceneTestCase.java             | 1 -
 1 file changed, 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5c72eb5/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index ac48aa6..3d8d346 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -141,7 +141,6 @@ import com.carrotsearch.randomizedtesting.rules.StaticFieldsInvariantRule;
 
 import junit.framework.AssertionFailedError;
 
-import static com.carrotsearch.randomizedtesting.RandomizedTest.frequently;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsInt;
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;


[13/21] lucene-solr:jira/solr-11285-sim: TEST: Stabilize TestIndexWriter#testCheckPendingFlushPostUpdate

Posted by ab...@apache.org.
TEST: Stabilize TestIndexWriter#testCheckPendingFlushPostUpdate


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/cb14da3b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/cb14da3b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/cb14da3b

Branch: refs/heads/jira/solr-11285-sim
Commit: cb14da3b120ee8436a98c60142eac731b2a93469
Parents: dcb8470
Author: Simon Willnauer <si...@apache.org>
Authored: Fri Dec 8 14:10:35 2017 +0100
Committer: Simon Willnauer <si...@apache.org>
Committed: Fri Dec 8 14:10:48 2017 +0100

----------------------------------------------------------------------
 .../apache/lucene/index/TestIndexWriter.java    | 27 +++++++++++++++-----
 1 file changed, 21 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cb14da3b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 76a8172..bbedc20 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -2900,7 +2900,7 @@ public class TestIndexWriter extends LuceneTestCase {
         .setMaxBufferedDocs(Integer.MAX_VALUE)
         .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH));
     AtomicBoolean done = new AtomicBoolean(false);
-    int numThreads = 1 + random().nextInt(3);
+    int numThreads = 2 + random().nextInt(3);
     CountDownLatch latch = new CountDownLatch(numThreads);
     Set<String> indexingThreads = new HashSet<>();
     Thread[] threads = new Thread[numThreads];
@@ -2929,7 +2929,7 @@ public class TestIndexWriter extends LuceneTestCase {
     try {
       int numIters = rarely() ? 1 + random().nextInt(5) : 1;
       for (int i = 0; i < numIters; i++) {
-        waitForDocs(w);
+        waitForDocsInBuffers(w, Math.min(2, threads.length));
         w.commit();
         assertTrue(flushingThreads.toString(), flushingThreads.contains(Thread.currentThread().getName()));
         flushingThreads.retainAll(indexingThreads);
@@ -2939,7 +2939,7 @@ public class TestIndexWriter extends LuceneTestCase {
       numIters = 0;
       while (true) {
         assertFalse("should finish in less than 100 iterations", numIters++ >= 100);
-        waitForDocs(w);
+        waitForDocsInBuffers(w, Math.min(2, threads.length));
         w.flush();
         flushingThreads.retainAll(indexingThreads);
         if (flushingThreads.isEmpty() == false) {
@@ -2955,10 +2955,25 @@ public class TestIndexWriter extends LuceneTestCase {
     }
   }
 
-  private static void waitForDocs(IndexWriter w) {
-    int numDocsInRam = w.numRamDocs();
+  private static void waitForDocsInBuffers(IndexWriter w, int buffersWithDocs) {
+    // wait until at least N threadstates have a doc in order to observe
+    // who flushes the segments.
     while(true) {
-      if (numDocsInRam != w.numRamDocs()) {
+      int numStatesWithDocs = 0;
+      DocumentsWriterPerThreadPool perThreadPool = w.docWriter.perThreadPool;
+      for (int i = 0; i < perThreadPool.getActiveThreadStateCount(); i++) {
+        DocumentsWriterPerThreadPool.ThreadState threadState = perThreadPool.getThreadState(i);
+        threadState.lock();
+        try {
+          DocumentsWriterPerThread dwpt = threadState.dwpt;
+          if (dwpt != null && dwpt.getNumDocsInRAM() > 1) {
+            numStatesWithDocs++;
+          }
+        } finally {
+          threadState.unlock();
+        }
+      }
+      if (numStatesWithDocs >= buffersWithDocs) {
         return;
       }
     }


[12/21] lucene-solr:jira/solr-11285-sim: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr

Posted by ab...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dcb84701
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dcb84701
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dcb84701

Branch: refs/heads/jira/solr-11285-sim
Commit: dcb84701a4c6d293b8eb30c676630d16d6cdcaa8
Parents: 18f12fd 25f24e0
Author: Karl Wright <Da...@gmail.com>
Authored: Fri Dec 8 05:41:33 2017 -0500
Committer: Karl Wright <Da...@gmail.com>
Committed: Fri Dec 8 05:41:33 2017 -0500

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  29 +-
 .../apache/lucene/document/RangeFieldQuery.java |   3 +-
 .../SortedNumericDocValuesRangeQuery.java       |   3 +-
 .../document/SortedSetDocValuesRangeQuery.java  |   3 +-
 .../apache/lucene/index/DocumentsWriter.java    |   5 +-
 .../lucene/index/FrozenBufferedUpdates.java     |   3 +-
 .../apache/lucene/index/IndexWriterConfig.java  |   5 +
 .../lucene/index/LiveIndexWriterConfig.java     |  27 ++
 .../java/org/apache/lucene/index/Sorter.java    |   5 +
 .../lucene/search/Boolean2ScorerSupplier.java   |  30 +-
 .../org/apache/lucene/search/BooleanQuery.java  |   6 +-
 .../org/apache/lucene/search/BooleanWeight.java |  25 +-
 .../org/apache/lucene/search/BoostQuery.java    |   4 +-
 .../apache/lucene/search/CachingCollector.java  |  13 +-
 .../org/apache/lucene/search/Collector.java     |   6 +-
 .../apache/lucene/search/ConjunctionScorer.java |  20 +
 .../lucene/search/ConstantScoreQuery.java       |  14 +-
 .../lucene/search/ConstantScoreScorer.java      |   5 +
 .../org/apache/lucene/search/DisiWrapper.java   |   3 +
 .../lucene/search/DisjunctionMaxQuery.java      |  14 +-
 .../lucene/search/DisjunctionMaxScorer.java     |   6 +
 .../lucene/search/DisjunctionSumScorer.java     |   7 +
 .../search/DocValuesFieldExistsQuery.java       |   2 +-
 .../lucene/search/DocValuesRewriteMethod.java   |   2 +-
 .../lucene/search/DoubleValuesSource.java       |   4 +-
 .../apache/lucene/search/ExactPhraseScorer.java |   5 +
 .../org/apache/lucene/search/FakeScorer.java    |   5 +
 .../apache/lucene/search/FilterCollector.java   |   4 +-
 .../org/apache/lucene/search/FilterScorer.java  |   3 +
 .../org/apache/lucene/search/FilterWeight.java  |   2 +-
 .../lucene/search/IndexOrDocValuesQuery.java    |   6 +-
 .../org/apache/lucene/search/IndexSearcher.java |  34 +-
 .../apache/lucene/search/MatchAllDocsQuery.java |   2 +-
 .../apache/lucene/search/MatchNoDocsQuery.java  |   2 +-
 .../lucene/search/MinShouldMatchSumScorer.java  |   6 +
 .../apache/lucene/search/MultiCollector.java    |  13 +-
 .../lucene/search/MultiCollectorManager.java    |  18 +-
 .../apache/lucene/search/MultiPhraseQuery.java  |   4 +-
 .../MultiTermQueryConstantScoreWrapper.java     |   4 +-
 .../lucene/search/NormsFieldExistsQuery.java    |   2 +-
 .../org/apache/lucene/search/PhraseQuery.java   |   4 +-
 .../apache/lucene/search/PointInSetQuery.java   |   2 +-
 .../apache/lucene/search/PointRangeQuery.java   |   2 +-
 .../java/org/apache/lucene/search/Query.java    |   4 +-
 .../org/apache/lucene/search/QueryCache.java    |   2 +-
 .../org/apache/lucene/search/QueryRescorer.java |   2 +-
 .../org/apache/lucene/search/ReqExclScorer.java |  11 +
 .../apache/lucene/search/ReqOptSumScorer.java   |   5 +
 .../search/ScoreCachingWrappingScorer.java      |  12 +-
 .../org/apache/lucene/search/ScoreMode.java     |  60 +++
 .../java/org/apache/lucene/search/Scorer.java   |  17 +
 .../lucene/search/SloppyPhraseScorer.java       |   5 +
 .../org/apache/lucene/search/SynonymQuery.java  |  49 +-
 .../apache/lucene/search/TermInSetQuery.java    |   4 +-
 .../org/apache/lucene/search/TermQuery.java     |  25 +-
 .../org/apache/lucene/search/TermScorer.java    |  11 +-
 .../lucene/search/TimeLimitingCollector.java    |   4 +-
 .../apache/lucene/search/TopDocsCollector.java  |   2 +-
 .../apache/lucene/search/TopFieldCollector.java |  16 +-
 .../lucene/search/TopScoreDocCollector.java     |  69 ++-
 .../lucene/search/TotalHitCountCollector.java   |   4 +-
 .../org/apache/lucene/search/WANDScorer.java    | 478 +++++++++++++++++++
 .../java/org/apache/lucene/search/Weight.java   |   2 +-
 .../org/apache/lucene/search/package-info.java  |   6 +-
 .../lucene/search/similarities/Axiomatic.java   |   6 +
 .../search/similarities/BM25Similarity.java     |   8 +-
 .../search/similarities/BooleanSimilarity.java  |   5 +
 .../search/similarities/DFISimilarity.java      |   6 +
 .../search/similarities/DFRSimilarity.java      |   8 +-
 .../search/similarities/IBSimilarity.java       |   6 +
 .../similarities/LMDirichletSimilarity.java     |   8 +-
 .../similarities/LMJelinekMercerSimilarity.java |   8 +-
 .../search/similarities/MultiSimilarity.java    |   9 +
 .../lucene/search/similarities/Similarity.java  |   7 +
 .../search/similarities/SimilarityBase.java     |  14 +-
 .../search/similarities/TFIDFSimilarity.java    |  14 +
 .../search/spans/FieldMaskingSpanQuery.java     |   5 +-
 .../lucene/search/spans/SpanBoostQuery.java     |   5 +-
 .../search/spans/SpanContainingQuery.java       |   9 +-
 .../search/spans/SpanMultiTermQueryWrapper.java |   3 +-
 .../lucene/search/spans/SpanNearQuery.java      |   9 +-
 .../lucene/search/spans/SpanNotQuery.java       |   9 +-
 .../apache/lucene/search/spans/SpanOrQuery.java |   7 +-
 .../search/spans/SpanPositionCheckQuery.java    |   7 +-
 .../apache/lucene/search/spans/SpanQuery.java   |   3 +-
 .../apache/lucene/search/spans/SpanScorer.java  |   5 +
 .../lucene/search/spans/SpanTermQuery.java      |   5 +-
 .../lucene/search/spans/SpanWithinQuery.java    |   9 +-
 .../apache/lucene/index/TestIndexWriter.java    |  87 ++++
 .../lucene/index/TestIndexWriterConfig.java     |   2 +
 .../lucene/index/TestMaxTermFrequency.java      |   4 +
 .../org/apache/lucene/index/TestOmitTf.java     |  21 +-
 .../apache/lucene/search/JustCompileSearch.java |   9 +-
 .../lucene/search/MultiCollectorTest.java       |  24 +-
 .../search/TestBoolean2ScorerSupplier.java      |  62 +--
 .../org/apache/lucene/search/TestBooleanOr.java |   6 +-
 .../apache/lucene/search/TestBooleanQuery.java  |  26 +-
 .../search/TestBooleanQueryVisitSubscorers.java |  14 +-
 .../lucene/search/TestBooleanRewrites.java      |   2 +-
 .../apache/lucene/search/TestBooleanScorer.java |  14 +-
 .../lucene/search/TestCachingCollector.java     |  11 +-
 .../lucene/search/TestConjunctionDISI.java      |   4 +
 .../apache/lucene/search/TestConjunctions.java  |   9 +-
 .../lucene/search/TestConstantScoreQuery.java   |  14 +-
 .../lucene/search/TestDisjunctionMaxQuery.java  |   4 +-
 .../lucene/search/TestDocValuesQueries.java     |   2 +-
 .../lucene/search/TestDocValuesScoring.java     |   5 +
 .../lucene/search/TestDoubleValuesSource.java   |   8 +-
 .../lucene/search/TestEarlyTermination.java     |   4 +-
 .../search/TestIndexOrDocValuesQuery.java       |   4 +-
 .../apache/lucene/search/TestLRUQueryCache.java |  14 +-
 .../lucene/search/TestMinShouldMatch2.java      |   6 +-
 .../apache/lucene/search/TestNeedsScores.java   |  22 +-
 .../apache/lucene/search/TestPointQueries.java  |  12 +-
 .../lucene/search/TestPositionIncrement.java    |   4 +-
 .../search/TestPositiveScoresOnlyCollector.java |   7 +-
 .../apache/lucene/search/TestQueryRescorer.java |   7 +-
 .../search/TestScoreCachingWrappingScorer.java  |  11 +-
 .../apache/lucene/search/TestScorerPerf.java    |   6 +-
 .../apache/lucene/search/TestSimilarity.java    |  16 +-
 .../lucene/search/TestSimilarityProvider.java   |  10 +-
 .../lucene/search/TestSloppyPhraseQuery.java    |   8 +-
 .../apache/lucene/search/TestSortRandom.java    |   2 +-
 .../lucene/search/TestSubScorerFreqs.java       |   5 +
 .../org/apache/lucene/search/TestTermQuery.java |   4 +-
 .../apache/lucene/search/TestTermScorer.java    |  14 +-
 .../search/TestTimeLimitingCollector.java       |   4 +-
 .../lucene/search/TestTopDocsCollector.java     | 206 +++++++-
 .../apache/lucene/search/TestTopDocsMerge.java  |   3 +-
 .../lucene/search/TestTopFieldCollector.java    |  34 +-
 .../TestTopFieldCollectorEarlyTermination.java  |   5 +-
 .../TestUsageTrackingFilterCachingPolicy.java   |   2 +-
 .../apache/lucene/search/TestWANDScorer.java    | 394 +++++++++++++++
 .../search/spans/JustCompileSearchSpans.java    |   3 +-
 .../search/spans/TestFieldMaskingSpanQuery.java |  15 +-
 .../search/spans/TestNearSpansOrdered.java      |  29 +-
 .../lucene/search/spans/TestSpanCollection.java |   7 +-
 .../search/spans/TestSpanContainQuery.java      |   3 +-
 .../apache/lucene/search/spans/TestSpans.java   |  11 +-
 .../apache/lucene/expressions/FakeScorer.java   |   5 +
 .../org/apache/lucene/facet/DrillSideways.java  |  11 +-
 .../apache/lucene/facet/DrillSidewaysQuery.java |   7 +-
 .../lucene/facet/DrillSidewaysScorer.java       |   5 +
 .../apache/lucene/facet/FacetsCollector.java    |   7 +-
 .../apache/lucene/facet/range/DoubleRange.java  |   5 +-
 .../facet/range/DoubleRangeFacetCounts.java     |   3 +-
 .../apache/lucene/facet/range/LongRange.java    |   5 +-
 .../facet/range/LongRangeFacetCounts.java       |   3 +-
 .../facet/AssertingSubDocsAtOnceCollector.java  |   5 +-
 .../apache/lucene/facet/TestDrillSideways.java  |   7 +-
 .../facet/range/TestRangeFacetCounts.java       |   5 +-
 .../search/grouping/AllGroupHeadsCollector.java |   5 +-
 .../search/grouping/AllGroupsCollector.java     |   5 +-
 .../search/grouping/BlockGroupingCollector.java |   5 +-
 .../grouping/DistinctValuesCollector.java       |   5 +-
 .../lucene/search/grouping/FakeScorer.java      |   5 +
 .../grouping/FirstPassGroupingCollector.java    |   5 +-
 .../search/grouping/GroupFacetCollector.java    |   5 +-
 .../lucene/search/grouping/GroupingSearch.java  |   3 +-
 .../grouping/SecondPassGroupingCollector.java   |   5 +-
 .../lucene/search/grouping/TestGrouping.java    |   5 +-
 .../search/highlight/QueryTermExtractor.java    |   3 +-
 .../highlight/WeightedSpanTermExtractor.java    |   9 +-
 .../lucene/search/uhighlight/PhraseHelper.java  |   7 +-
 .../search/uhighlight/UnifiedHighlighter.java   |   3 +-
 .../search/highlight/HighlighterPhraseTest.java |   5 +-
 .../uhighlight/TestUnifiedHighlighterMTQ.java   |   5 +-
 .../TestUnifiedHighlighterStrictPhrases.java    |   5 +-
 .../search/join/BaseGlobalOrdinalScorer.java    |   5 +
 .../apache/lucene/search/join/FakeScorer.java   |   5 +
 .../search/join/GenericTermsCollector.java      |   4 +-
 .../search/join/GlobalOrdinalsCollector.java    |   4 +-
 .../lucene/search/join/GlobalOrdinalsQuery.java |   4 +-
 .../join/GlobalOrdinalsWithScoreCollector.java  |   8 +-
 .../join/GlobalOrdinalsWithScoreQuery.java      |   8 +-
 .../org/apache/lucene/search/join/JoinUtil.java |   8 +-
 .../join/ParentChildrenBlockJoinQuery.java      |   8 +-
 .../join/PointInSetIncludingScoreQuery.java     |   7 +-
 .../lucene/search/join/QueryBitSetProducer.java |   2 +-
 .../lucene/search/join/TermsCollector.java      |   4 +-
 .../search/join/TermsIncludingScoreQuery.java   |  11 +-
 .../search/join/TermsWithScoreCollector.java    |   4 +-
 .../search/join/ToChildBlockJoinQuery.java      |   9 +-
 .../search/join/ToParentBlockJoinQuery.java     |  15 +-
 .../lucene/search/join/TestBlockJoin.java       |  21 +-
 .../search/join/TestBlockJoinValidation.java    |   2 +-
 .../apache/lucene/search/join/TestJoinUtil.java |  28 +-
 .../apache/lucene/index/memory/MemoryIndex.java |   5 +-
 .../apache/lucene/index/PKIndexSplitter.java    |   4 +-
 .../search/DiversifiedTopDocsCollector.java     |   4 +-
 .../lucene/search/DocValuesStatsCollector.java  |   4 +-
 .../search/TestDiversifiedTopDocsCollector.java |   5 +
 .../apache/lucene/queries/BoostingQuery.java    |  19 +-
 .../apache/lucene/queries/CustomScoreQuery.java |  16 +-
 .../lucene/queries/function/BoostedQuery.java   |  14 +-
 .../queries/function/FunctionMatchQuery.java    |   3 +-
 .../lucene/queries/function/FunctionQuery.java  |   8 +-
 .../queries/function/FunctionRangeQuery.java    |   3 +-
 .../queries/function/FunctionScoreQuery.java    |  11 +-
 .../lucene/queries/function/ValueSource.java    |   5 +
 .../queries/function/ValueSourceScorer.java     |   5 +
 .../function/valuesource/QueryValueSource.java  |   3 +-
 .../queries/payloads/PayloadScoreQuery.java     |   7 +-
 .../queries/payloads/SpanPayloadCheckQuery.java |   7 +-
 .../function/TestIndexReaderFunctions.java      |   5 +-
 .../queries/payloads/TestPayloadSpans.java      |  31 +-
 .../queries/payloads/TestPayloadTermQuery.java  |   5 +-
 .../surround/query/BooleanQueryTst.java         |   5 +-
 .../document/LatLonDocValuesBoxQuery.java       |   3 +-
 .../document/LatLonDocValuesDistanceQuery.java  |   3 +-
 .../document/LatLonPointDistanceQuery.java      |   3 +-
 .../document/LatLonPointInPolygonQuery.java     |   3 +-
 .../apache/lucene/payloads/PayloadSpanUtil.java |   3 +-
 .../org/apache/lucene/search/CoveringQuery.java |   4 +-
 .../apache/lucene/search/CoveringScorer.java    |   6 +
 .../lucene/search/DocValuesNumbersQuery.java    |   2 +-
 .../lucene/search/DocValuesTermsQuery.java      |   2 +-
 .../lucene/search/TermAutomatonQuery.java       |   2 +-
 .../lucene/search/TermAutomatonScorer.java      |   6 +-
 .../sandbox/queries/FuzzyLikeThisQueryTest.java |   9 +-
 .../lucene/search/TestTermAutomatonQuery.java   |   2 +-
 .../spatial/composite/CompositeVerifyQuery.java |   5 +-
 .../composite/IntersectsRPTVerifyQuery.java     |   3 +-
 .../spatial/prefix/AbstractPrefixTreeQuery.java |   3 +-
 .../serialized/SerializedDVStrategy.java        |   3 +-
 .../spatial/vector/PointVectorStrategy.java     |   5 +-
 .../spatial/prefix/NumberRangeFacetsTest.java   |   5 +-
 .../spatial3d/PointInGeo3DShapeQuery.java       |   3 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |   5 +-
 .../search/suggest/document/ContextQuery.java   |   5 +-
 .../suggest/document/FuzzyCompletionQuery.java  |   3 +-
 .../suggest/document/PrefixCompletionQuery.java |   3 +-
 .../suggest/document/RegexCompletionQuery.java  |   3 +-
 .../suggest/document/SuggestIndexSearcher.java  |   2 +-
 .../document/TopSuggestDocsCollector.java       |   5 +-
 .../apache/lucene/geo/BaseGeoPointTestCase.java |  17 +-
 .../lucene/search/AssertingBulkScorer.java      |  12 +-
 .../lucene/search/AssertingCollector.java       |   2 +-
 .../lucene/search/AssertingIndexSearcher.java   |   4 +-
 .../lucene/search/AssertingLeafCollector.java   |   6 +-
 .../apache/lucene/search/AssertingQuery.java    |   4 +-
 .../apache/lucene/search/AssertingScorer.java   |  30 +-
 .../apache/lucene/search/AssertingWeight.java   |  12 +-
 .../search/BaseRangeFieldQueryTestCase.java     |   2 +-
 .../lucene/search/BulkScorerWrapperScorer.java  |   5 +
 .../org/apache/lucene/search/CheckHits.java     |   8 +-
 .../org/apache/lucene/search/QueryUtils.java    |  22 +-
 .../lucene/search/RandomApproximationQuery.java |   9 +-
 .../lucene/search/ShardSearchingTestBase.java   |   2 +-
 .../similarities/AssertingSimilarity.java       |   9 +-
 .../similarities/BaseSimilarityTestCase.java    |   2 +
 .../lucene/search/spans/AssertingSpanQuery.java |   5 +-
 .../org/apache/lucene/util/LuceneTestCase.java  |   3 +
 .../search/TestBaseExplanationTestCase.java     |   5 +-
 solr/CHANGES.txt                                |   2 +
 .../analytics/facet/AbstractSolrQueryFacet.java |   5 +-
 .../java/org/apache/solr/ltr/LTRRescorer.java   |   5 +-
 .../org/apache/solr/ltr/LTRScoringQuery.java    |  22 +-
 .../org/apache/solr/ltr/feature/Feature.java    |   4 +
 .../solr/ltr/feature/FieldLengthFeature.java    |   5 +
 .../solr/ltr/feature/FieldValueFeature.java     |   5 +
 .../solr/ltr/feature/OriginalScoreFeature.java  |   8 +-
 .../apache/solr/ltr/feature/SolrFeature.java    |   8 +-
 .../LTRFeatureLoggerTransformerFactory.java     |   3 +-
 .../solr/ltr/TestLTRReRankingPipeline.java      |   5 +-
 .../apache/solr/ltr/TestLTRScoringQuery.java    |   3 +-
 .../solr/ltr/TestSelectiveWeightCreation.java   |   3 +-
 .../org/apache/solr/cloud/CreateAliasCmd.java   |  36 +-
 .../solr/handler/component/ExpandComponent.java |   9 +-
 .../solr/handler/component/QueryComponent.java  |   5 +
 .../handler/component/RealTimeGetComponent.java |   3 +-
 .../java/org/apache/solr/query/FilterQuery.java |   7 +-
 .../org/apache/solr/query/SolrRangeQuery.java   |   7 +-
 .../java/org/apache/solr/schema/LatLonType.java |   8 +-
 .../apache/solr/search/AbstractReRankQuery.java |   6 +-
 .../solr/search/CollapsingQParserPlugin.java    |  14 +-
 .../apache/solr/search/DelegatingCollector.java |   5 +-
 .../org/apache/solr/search/DocSetCollector.java |   5 +-
 .../apache/solr/search/ExportQParserPlugin.java |   9 +-
 .../src/java/org/apache/solr/search/Filter.java |   3 +-
 .../solr/search/GraphTermsQParserPlugin.java    |   5 +-
 .../apache/solr/search/HashQParserPlugin.java   |   6 +-
 .../apache/solr/search/JoinQParserPlugin.java   |   3 +-
 .../apache/solr/search/QueryWrapperFilter.java  |   3 +-
 .../org/apache/solr/search/ReRankCollector.java |   5 +-
 .../solr/search/SolrConstantScoreQuery.java     |   3 +-
 .../apache/solr/search/SolrIndexSearcher.java   |  14 +-
 .../org/apache/solr/search/WrappedQuery.java    |   5 +-
 .../facet/FacetFieldProcessorByHashDV.java      |   7 +-
 .../org/apache/solr/search/join/GraphQuery.java |   8 +-
 .../solr/search/join/GraphTermsCollector.java   |   5 +-
 .../search/join/ScoreJoinQParserPlugin.java     |  12 +-
 .../solr/search/stats/ExactStatsCache.java      |   3 +-
 .../solr/update/DeleteByQueryWrapper.java       |   5 +-
 .../apache/solr/cloud/AliasIntegrationTest.java |  10 +-
 .../solr/search/TestQueryWrapperFilter.java     |   5 +-
 .../apache/solr/search/TestRankQueryPlugin.java |  18 +-
 .../uninverting/TestFieldCacheSortRandom.java   |   3 +-
 solr/solr-ref-guide/src/collections-api.adoc    |  11 +
 .../src/solrcloud-autoscaling-api.adoc          |  51 ++
 300 files changed, 2896 insertions(+), 745 deletions(-)
----------------------------------------------------------------------