Posted to commits@lucene.apache.org by da...@apache.org on 2018/08/06 04:15:34 UTC

[01/48] lucene-solr:jira/http2: Fix InfixSuggestersTest.testShutdownDuringBuild() failures

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/http2 8b208776e -> d6d6fbdbe


Fix InfixSuggestersTest.testShutdownDuringBuild() failures


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a08eadb4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a08eadb4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a08eadb4

Branch: refs/heads/jira/http2
Commit: a08eadb4809b1d70d9cf6a098db9489f0325c260
Parents: 8d28bbc
Author: Steve Rowe <sa...@apache.org>
Authored: Mon Jul 30 22:49:30 2018 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Mon Jul 30 22:49:49 2018 -0400

----------------------------------------------------------------------
 .../org/apache/lucene/util/LuceneTestCase.java  | 54 ++++++++++++++++++--
 solr/CHANGES.txt                                |  2 +
 .../handler/component/InfixSuggestersTest.java  | 33 +++++++++---
 3 files changed, 79 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a08eadb4/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 6390cc8..a17d60e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -52,6 +52,7 @@ import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -2731,17 +2732,64 @@ public abstract class LuceneTestCase extends Assert {
           return expectedWrappedType.cast(cause);
         } else {
           AssertionFailedError assertion = new AssertionFailedError
-              ("Unexpected wrapped exception type, expected " + expectedWrappedType.getSimpleName());
+              ("Unexpected wrapped exception type, expected " + expectedWrappedType.getSimpleName() 
+                  + " but got: " + cause);
           assertion.initCause(e);
           throw assertion;
         }
       }
       AssertionFailedError assertion = new AssertionFailedError
-          ("Unexpected outer exception type, expected " + expectedOuterType.getSimpleName());
+          ("Unexpected outer exception type, expected " + expectedOuterType.getSimpleName()
+           + " but got: " + e);
       assertion.initCause(e);
       throw assertion;
     }
-    throw new AssertionFailedError("Expected outer exception " + expectedOuterType.getSimpleName());
+    throw new AssertionFailedError("Expected outer exception " + expectedOuterType.getSimpleName()
+        + " but no exception was thrown.");
+  }
+
+  /**
+   * Checks that one of the specified combinations of outer and wrapped exception classes
+   * is thrown by the given runnable, and returns the outer exception.
+   * 
+   * This method accepts outer exceptions with no wrapped exception;
+   * an empty list of expected wrapped exception types indicates no wrapped exception.
+   */
+  public static <TO extends Throwable, TW extends Throwable> TO expectThrowsAnyOf
+  (LinkedHashMap<Class<? extends TO>,List<Class<? extends TW>>> expectedOuterToWrappedTypes, ThrowingRunnable runnable) {
+    try {
+      runnable.run();
+    } catch (Throwable e) {
+      for (Map.Entry<Class<? extends TO>, List<Class<? extends TW>>> entry : expectedOuterToWrappedTypes.entrySet()) {
+        Class<? extends TO> expectedOuterType = entry.getKey();
+        List<Class<? extends TW>> expectedWrappedTypes = entry.getValue();
+        Throwable cause = e.getCause();
+        if (expectedOuterType.isInstance(e)) {
+          if (expectedWrappedTypes.isEmpty()) {
+            return null; // no wrapped exception
+          } else {
+            for (Class<? extends TW> expectedWrappedType : expectedWrappedTypes) {
+              if (expectedWrappedType.isInstance(cause)) {
+                return expectedOuterType.cast(e);
+              }
+            }
+            List<String> wrappedTypes = expectedWrappedTypes.stream().map(Class::getSimpleName).collect(Collectors.toList());
+            AssertionFailedError assertion = new AssertionFailedError
+                ("Unexpected wrapped exception type, expected one of " + wrappedTypes + " but got: " + cause);
+            assertion.initCause(e);
+            throw assertion;
+          }
+        }
+      }
+      List<String> outerTypes = expectedOuterToWrappedTypes.keySet().stream().map(Class::getSimpleName).collect(Collectors.toList());
+      AssertionFailedError assertion = new AssertionFailedError
+          ("Unexpected outer exception type, expected one of " + outerTypes + " but got: " + e);
+      assertion.initCause(e);
+      throw assertion;
+    }
+    List<String> outerTypes = expectedOuterToWrappedTypes.keySet().stream().map(Class::getSimpleName).collect(Collectors.toList());
+    throw new AssertionFailedError("Expected any of the following outer exception types: " + outerTypes
+        + " but no exception was thrown.");
   }
 
   /** Returns true if the file exists (can be opened), false

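For readers of this hunk, a minimal usage sketch of the new expectThrowsAnyOf() overload. Only expectThrowsAnyOf() and ThrowingRunnable come from the test framework above; the test class name and the thrown exceptions are illustrative:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.List;
    import org.apache.lucene.util.LuceneTestCase;

    public class ExpectThrowsAnyOfSketch extends LuceneTestCase {
      public void testAnyOf() {
        // Keys are acceptable outer exception types; each value lists acceptable
        // wrapped (cause) types. An empty list means "no wrapped exception expected".
        LinkedHashMap<Class<? extends Throwable>, List<Class<? extends Throwable>>> expected =
            new LinkedHashMap<>();
        expected.put(RuntimeException.class,
            Arrays.asList(IllegalStateException.class, java.io.IOException.class));
        expected.put(IllegalArgumentException.class, Collections.emptyList());

        Throwable outer = expectThrowsAnyOf(expected,
            () -> { throw new RuntimeException(new IllegalStateException("boom")); });
        assertTrue(outer instanceof RuntimeException);
      }
    }
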
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a08eadb4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 89e6938..2d8db8c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -183,6 +183,8 @@ Bug Fixes
 
 * SOLR-12477: An update would return a client error(400) if it hit an AlreadyClosedException.
   We now return the error as a server error(500) instead (Jeffery via Varun Thacker)
+  
+* SOLR-12606: Fix InfixSuggestersTest.testShutdownDuringBuild() failures. (Steve Rowe) 
 
 Optimizations
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a08eadb4/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java b/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java
index a8188bb..7971bf0 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java
@@ -17,10 +17,14 @@
 
 package org.apache.solr.handler.component;
 
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.spelling.suggest.RandomTestDictionaryFactory;
 import org.apache.solr.spelling.suggest.SuggesterParams;
@@ -94,7 +98,7 @@ public class InfixSuggestersTest extends SolrTestCaseJ4 {
 
   @Test
   public void testReloadDuringBuild() throws Exception {
-    ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("AnalyzingInfixSuggesterTest");
+    ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("InfixSuggesterTest");
     try {
       // Build the suggester in the background with a long dictionary
       Future job = executor.submit(() ->
@@ -114,20 +118,35 @@ public class InfixSuggestersTest extends SolrTestCaseJ4 {
 
   @Test
   public void testShutdownDuringBuild() throws Exception {
-    ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("AnalyzingInfixSuggesterTest");
+    ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("InfixSuggesterTest");
     try {
+      LinkedHashMap<Class<? extends Throwable>, List<Class<? extends Throwable>>> expected = new LinkedHashMap<>();
+      expected.put(RuntimeException.class, Arrays.asList
+          (SolrCoreState.CoreIsClosedException.class, SolrException.class, IllegalStateException.class));
+      final Throwable[] outerException = new Throwable[1];
       // Build the suggester in the background with a long dictionary
-      Future job = executor.submit(() -> 
-          expectThrows(RuntimeException.class, SolrCoreState.CoreIsClosedException.class,
-              () -> assertQ(req("qt", rh_analyzing_long,
-                  SuggesterParams.SUGGEST_BUILD_ALL, "true"),
-                  "//str[@name='command'][.='buildAll']")));
+      Future job = executor.submit(() -> outerException[0] = expectThrowsAnyOf(expected,
+          () -> assertQ(req("qt", rh_analyzing_long, SuggesterParams.SUGGEST_BUILD_ALL, "true"),
+              "//str[@name='command'][.='buildAll']")));
       Thread.sleep(100); // TODO: is there a better way to ensure that the build has begun?
       h.close();
       // Stop the dictionary's input iterator
       System.clearProperty(RandomTestDictionaryFactory.RandomTestDictionary
           .getEnabledSysProp("longRandomAnalyzingInfixSuggester"));
       job.get();
+      Throwable wrappedException = outerException[0].getCause();
+      if (wrappedException instanceof SolrException) {
+        String expectedMessage = "SolrCoreState already closed.";
+        assertTrue("Expected wrapped SolrException message to contain '" + expectedMessage 
+            + "' but message is '" + wrappedException.getMessage() + "'", 
+            wrappedException.getMessage().contains(expectedMessage));
+      } else if (wrappedException instanceof IllegalStateException
+          && ! (wrappedException instanceof SolrCoreState.CoreIsClosedException)) { // CoreIsClosedException extends IllegalStateException
+        String expectedMessage = "Cannot commit on an closed writer. Add documents first";
+        assertTrue("Expected wrapped IllegalStateException message to contain '" + expectedMessage
+                + "' but message is '" + wrappedException.getMessage() + "'",
+            wrappedException.getMessage().contains(expectedMessage));
+      }
     } finally {
       ExecutorUtil.shutdownAndAwaitTermination(executor);
       initCore("solrconfig-infixsuggesters.xml","schema.xml"); // put the core back for other tests

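One detail of the patch worth calling out: a lambda cannot assign to a local variable of the enclosing method, so the test smuggles the outer exception out of the background task through a one-element array. A standalone sketch of the idiom (all names illustrative):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class CaptureFromLambdaSketch {
      public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        final Throwable[] captured = new Throwable[1]; // effectively-final holder
        // The lambda may not write to a plain local, but it may write into the array.
        executor.submit(() -> captured[0] = new RuntimeException("from the task")).get();
        System.out.println("captured: " + captured[0]); // Future.get() ensures visibility
        executor.shutdown();
      }
    }
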

[04/48] lucene-solr:jira/http2: SOLR-12574: Fix the SignificantTermStream to use the new bucket format

Posted by da...@apache.org.
SOLR-12574: Fix the SignificantTermStream to use the new bucket format


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/abd6b07e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/abd6b07e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/abd6b07e

Branch: refs/heads/jira/http2
Commit: abd6b07ea98eaf24577ad8c347bb39f491276fa0
Parents: 4602e4d
Author: Alexandre Rafalovitch <ar...@apache.org>
Authored: Tue Jul 31 08:18:39 2018 -0400
Committer: Alexandre Rafalovitch <ar...@apache.org>
Committed: Tue Jul 31 08:18:39 2018 -0400

----------------------------------------------------------------------
 .../solrj/io/stream/SignificantTermsStream.java     | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/abd6b07e/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index 4562cd3..729ddb1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -44,6 +44,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
@@ -286,13 +287,16 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
         long numDocs = 0;
         long resultCount = 0;
         for (Future<NamedList> getTopTermsCall : callShards(getShards(zkHost, collection, streamContext))) {
-          NamedList resp = getTopTermsCall.get();
+          NamedList fullResp = getTopTermsCall.get();
+          Map stResp = (Map)fullResp.get("significantTerms");
 
-          List<String> terms = (List<String>)resp.get("sterms");
-          List<Integer> docFreqs = (List<Integer>)resp.get("docFreq");
-          List<Integer> queryDocFreqs = (List<Integer>)resp.get("queryDocFreq");
-          numDocs += (Integer)resp.get("numDocs");
-          resultCount += (Integer)resp.get("resultCount");
+          List<String> terms = (List<String>)stResp.get("sterms");
+          List<Integer> docFreqs = (List<Integer>)stResp.get("docFreq");
+          List<Integer> queryDocFreqs = (List<Integer>)stResp.get("queryDocFreq");
+          numDocs += (Integer)stResp.get("numDocs");
+
+          SolrDocumentList searchResp = (SolrDocumentList) fullResp.get("response");
+          resultCount += searchResp.getNumFound();
 
           for (int i = 0; i < terms.size(); i++) {
             String term = terms.get(i);

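For reference, a sketch of the "new bucket format" the patched code expects: the per-shard statistics now live under a "significantTerms" map inside the full response, and the hit count comes from the standard "response" document list instead of a top-level "resultCount" entry. The values below are illustrative:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.common.SolrDocumentList;
    import org.apache.solr.common.util.NamedList;

    public class BucketFormatSketch {
      public static void main(String[] args) {
        // Shape of one shard response under the new bucket format (values illustrative).
        Map<String, Object> stResp = new HashMap<>();
        stResp.put("sterms", Arrays.asList("lucene", "solr"));
        stResp.put("docFreq", Arrays.asList(120, 95));
        stResp.put("queryDocFreq", Arrays.asList(40, 33));
        stResp.put("numDocs", 10000);

        SolrDocumentList searchResp = new SolrDocumentList();
        searchResp.setNumFound(250); // replaces the old top-level "resultCount"

        NamedList<Object> fullResp = new NamedList<>();
        fullResp.add("significantTerms", stResp);
        fullResp.add("response", searchResp);

        // Extraction mirrors the patched SignificantTermsStream code.
        Map st = (Map) fullResp.get("significantTerms");
        long numDocs = ((Integer) st.get("numDocs")).longValue();
        long resultCount = ((SolrDocumentList) fullResp.get("response")).getNumFound();
        System.out.println(numDocs + " docs, " + resultCount + " hits");
      }
    }
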

[11/48] lucene-solr:jira/http2: SOLR-12609: include actual value in two MathExpressionTest.testMultiVariateNormalDistribution asserts

Posted by da...@apache.org.
SOLR-12609: include actual value in two MathExpressionTest.testMultiVariateNormalDistribution asserts


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8a448280
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8a448280
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8a448280

Branch: refs/heads/jira/http2
Commit: 8a4482805bfdcb31450b25a824b8f785543e8f4d
Parents: 4a1ee04
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 31 18:49:38 2018 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Jul 31 19:29:50 2018 +0100

----------------------------------------------------------------------
 .../apache/solr/client/solrj/io/stream/MathExpressionTest.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a448280/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
index fe782b2..3806b22 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
@@ -3006,8 +3006,8 @@ public class MathExpressionTest extends SolrCloudTestCase {
     assertEquals(sample.size(), 2);
     Number sample1 = sample.get(0);
     Number sample2 = sample.get(1);
-    assertTrue(sample1.doubleValue() > -30 && sample1.doubleValue() < 30);
-    assertTrue(sample2.doubleValue() > 50 && sample2.doubleValue() < 250);
+    assertTrue(sample.toString(), sample1.doubleValue() > -30 && sample1.doubleValue() < 30);
+    assertTrue(sample.toString(), sample2.doubleValue() > 50 && sample2.doubleValue() < 250);
 
     Number density = (Number)tuples.get(0).get("j");
     assertEquals(density.doubleValue(), 0.007852638121596995, .00001);

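The change applies a small but useful JUnit pattern: the two-argument assertTrue(message, condition) overload records the actual value in the failure output, which matters for a randomized test that fails only occasionally in CI. A minimal illustration (the value is hypothetical):

    import static org.junit.Assert.assertTrue;

    public class AssertWithMessageSketch {
      public static void main(String[] args) {
        double sample1 = 12.5; // illustrative sampled value
        // On failure this reports the offending value instead of a bare AssertionError.
        assertTrue("sample1 was " + sample1, sample1 > -30 && sample1 < 30);
        System.out.println("assertion passed");
      }
    }
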

[02/48] lucene-solr:jira/http2: SOLR-12601: Refactor the autoscaling package to improve readability

Posted by da...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
new file mode 100644
index 0000000..8df74bf
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
@@ -0,0 +1,364 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Consumer;
+
+import org.apache.solr.common.cloud.rule.ImplicitSnitch;
+
+import static java.util.Collections.emptySet;
+import static java.util.Collections.unmodifiableSet;
+
+/**
+ * A Variable Type used in Autoscaling policy rules. Each variable type may have a unique
+ * implementation of its functionality.
+ */
+public interface Variable {
+  String NULL = "";
+  String coreidxsize = "INDEX.sizeInGB";
+
+  default boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
+    return op.match(val, validate(name, inputVal, false)) == Clause.TestStatus.PASS;
+  }
+  default Object convertVal(Object val) {
+    return val;
+  }
+
+  default void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector, boolean strictMode) {
+  }
+
+  default void addViolatingReplicas(Violation.Ctx ctx) {
+    for (Row row : ctx.allRows) {
+      if (ctx.clause.tag.varType.meta.isNodeSpecificVal() && !row.node.equals(ctx.tagKey)) continue;
+      Violation.collectViolatingReplicas(ctx, row);
+    }
+  }
+
+  void getSuggestions(Suggestion.Ctx ctx) ;
+
+  default Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
+    return condition.val;
+  }
+
+  int compareViolation(Violation v1, Violation v2);
+
+  default void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
+  }
+
+  default String postValidate(Clause.Condition condition) {
+    return null;
+  }
+
+  default Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
+    return expected;
+  }
+
+  Object validate(String name, Object val, boolean isRuleVal);
+
+  /**
+   * Type details of each variable in policies
+   */
+  public enum Type implements Variable {
+    @Meta(name = "withCollection", type = String.class, isNodeSpecificVal = true, implementation = WithCollectionVariable.class)
+    WITH_COLLECTION(),
+
+    @Meta(name = "collection",
+        type = String.class)
+    COLL(),
+    @Meta(
+        name = "shard",
+        type = String.class,
+        wildCards = {Policy.EACH, Policy.ANY})
+    SHARD(),
+
+    @Meta(name = "replica",
+        type = Double.class,
+        min = 0, max = -1,
+        implementation = ReplicaVariable.class,
+        computedValues = {Clause.ComputedType.EQUAL, Clause.ComputedType.PERCENT, Clause.ComputedType.ALL})
+    REPLICA(),
+    @Meta(name = ImplicitSnitch.PORT,
+        type = Long.class,
+        min = 1,
+        max = 65535,
+        supportArrayVals = true,
+        wildCards = Policy.EACH
+    )
+    PORT(),
+    @Meta(name = "ip_1",
+        type = Long.class,
+        min = 0,
+        max = 255,
+        supportArrayVals = true,
+        wildCards = Policy.EACH)
+    IP_1(),
+    @Meta(name = "ip_2",
+        type = Long.class,
+        min = 0,
+        max = 255,
+        supportArrayVals = true,
+        wildCards = Policy.EACH)
+    IP_2(),
+    @Meta(name = "ip_3",
+        type = Long.class,
+        min = 0,
+        max = 255,
+        supportArrayVals = true,
+        wildCards = Policy.EACH)
+    IP_3(),
+    @Meta(name = "ip_4",
+        type = Long.class,
+        min = 0,
+        max = 255,
+        supportArrayVals = true,
+        wildCards = Policy.EACH)
+    IP_4(),
+    @Meta(name = ImplicitSnitch.DISK,
+        type = Double.class,
+        min = 0,
+        isNodeSpecificVal = true,
+        associatedPerReplicaValue = Variable.coreidxsize,
+        associatedPerNodeValue = "totaldisk",
+        implementation = FreeDiskVariable.class,
+        computedValues = Clause.ComputedType.PERCENT)
+    FREEDISK(),
+
+    @Meta(name = "totaldisk",
+        type = Double.class,
+        isHidden = true, implementation = VariableBase.TotalDiskVariable.class)
+    TOTALDISK(),
+
+    @Meta(name = Variable.coreidxsize,
+        type = Double.class,
+        isNodeSpecificVal = true,
+        isHidden = true,
+        min = 0,
+        implementation = VariableBase.CoreIndexSizeVariable.class,
+        metricsKey = "INDEX.sizeInBytes")
+    CORE_IDX(),
+    @Meta(name = ImplicitSnitch.NODEROLE,
+        type = String.class,
+        enumVals = "overseer")
+    NODE_ROLE(),
+
+    @Meta(name = ImplicitSnitch.CORES,
+        type = Long.class,
+        min = 0,
+        implementation = CoresVariable.class)
+    CORES(),
+
+    @Meta(name = ImplicitSnitch.SYSLOADAVG,
+        type = Double.class,
+        min = 0,
+        max = 100,
+        isNodeSpecificVal = true)
+    SYSLOADAVG(),
+
+    @Meta(name = ImplicitSnitch.HEAPUSAGE,
+        type = Double.class,
+        min = 0,
+        isNodeSpecificVal = true)
+    HEAPUSAGE(),
+    @Meta(name = "NUMBER",
+        type = Long.class,
+        min = 0)
+    NUMBER(),
+
+    @Meta(name = "STRING",
+        type = String.class,
+        wildCards = Policy.EACH,
+        supportArrayVals = true)
+    STRING(),
+
+    @Meta(name = "node",
+        type = String.class,
+        isNodeSpecificVal = true,
+        wildCards = {Policy.ANY, Policy.EACH},
+        implementation = NodeVariable.class,
+        supportArrayVals = true)
+    NODE(),
+
+    @Meta(name = "LAZY",
+        type = void.class,
+        implementation = VariableBase.LazyVariable.class)
+    LAZY(),
+
+    @Meta(name = ImplicitSnitch.DISKTYPE,
+        type = String.class,
+        enumVals = {"ssd", "rotational"},
+        implementation = VariableBase.DiskTypeVariable.class,
+        supportArrayVals = true)
+    DISKTYPE();
+
+    public final String tagName;
+    public final Class type;
+    public Meta meta;
+
+    public final Set<String> vals;
+    public final Number min;
+    public final Number max;
+    public final Boolean additive;
+    public final Set<String> wildCards;
+    public final String perReplicaValue;
+    public final Set<String> associatedPerNodeValues;
+    public final String metricsAttribute;
+    public final Set<Clause.ComputedType> supportedComputedTypes;
+    final Variable impl;
+
+
+    Type() {
+      try {
+        meta = Type.class.getField(name()).getAnnotation(Meta.class);
+        if (meta == null) {
+          throw new RuntimeException("Invalid type, should have a @Meta annotation " + name());
+        }
+      } catch (NoSuchFieldException e) {
+        //cannot happen
+      }
+      impl= VariableBase.loadImpl(meta, this);
+
+      this.tagName = meta.name();
+      this.type = meta.type();
+
+      this.vals = readSet(meta.enumVals());
+      this.max = readNum(meta.max());
+      this.min = readNum(meta.min());
+      this.perReplicaValue = readStr(meta.associatedPerReplicaValue());
+      this.associatedPerNodeValues = readSet(meta.associatedPerNodeValue());
+      this.additive = meta.isAdditive();
+      this.metricsAttribute = readStr(meta.metricsKey());
+      this.supportedComputedTypes = meta.computedValues()[0] == Clause.ComputedType.NULL ?
+          emptySet() :
+          unmodifiableSet(new HashSet(Arrays.asList(meta.computedValues())));
+      this.wildCards = readSet(meta.wildCards());
+
+    }
+
+    public String getTagName() {
+      return meta.name();
+    }
+
+    private String readStr(String s) {
+      return NULL.equals(s) ? null : s;
+    }
+
+    private Number readNum(double v) {
+      return v == -1 ? null :
+          (Number) validate(null, v, true);
+    }
+
+    Set<String> readSet(String[] vals) {
+      if (NULL.equals(vals[0])) return emptySet();
+      return unmodifiableSet(new HashSet<>(Arrays.asList(vals)));
+    }
+
+    @Override
+    public void getSuggestions(Suggestion.Ctx ctx) {
+      impl.getSuggestions(ctx);
+    }
+
+    @Override
+    public void addViolatingReplicas(Violation.Ctx ctx) {
+        impl.addViolatingReplicas(ctx);
+    }
+
+    public Operand getOperand(Operand expected, Object val, Clause.ComputedType computedType) {
+      return impl.getOperand(expected, val, computedType);
+    }
+
+
+    public Object convertVal(Object val) {
+      return impl.convertVal(val);
+    }
+
+    public String postValidate(Clause.Condition condition) {
+      return impl.postValidate(condition);
+    }
+
+    public Object validate(String name, Object val, boolean isRuleVal) {
+      return impl.validate(name, val, isRuleVal);
+    }
+
+    /**
+     * Simulate a replica addition to a node in the cluster
+     */
+    public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector, boolean strictMode) {
+      impl.projectAddReplica(cell, ri, opCollector, strictMode);
+    }
+
+    public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
+      impl.projectRemoveReplica(cell, ri, opCollector);
+    }
+
+    @Override
+    public int compareViolation(Violation v1, Violation v2) {
+      return impl.compareViolation(v1, v2);
+    }
+
+    @Override
+    public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
+      return impl.computeValue(session, condition, collection, shard, node);
+    }
+
+    @Override
+    public boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
+      return impl.match(inputVal, op, val, name, row);
+    }
+  }
+
+  @Target(ElementType.FIELD)
+  @Retention(RetentionPolicy.RUNTIME)
+  @interface Meta {
+    String name();
+
+    Class type();
+
+    String[] associatedPerNodeValue() default NULL;
+
+    String associatedPerReplicaValue() default NULL;
+
+    String[] enumVals() default NULL;
+
+    String[] wildCards() default NULL;
+
+    boolean isNodeSpecificVal() default false;
+
+    boolean isHidden() default false;
+
+    boolean isAdditive() default true;
+
+    double min() default -1d;
+
+    double max() default -1d;
+
+    boolean supportArrayVals() default false;
+
+    String metricsKey() default NULL;
+
+    Class implementation() default void.class;
+
+    Clause.ComputedType[] computedValues() default Clause.ComputedType.NULL;
+  }
+}

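As a quick illustration of consuming the new @Meta-driven enum, a sketch that dumps each variable type's validation metadata. It relies only on the public fields declared in the diff above:

    import org.apache.solr.client.solrj.cloud.autoscaling.Variable;

    public class DumpVariableTypesSketch {
      public static void main(String[] args) {
        for (Variable.Type t : Variable.Type.values()) {
          // tagName, type, min, max and wildCards are public final fields that the
          // enum constructor populates from each constant's @Meta annotation.
          System.out.printf("%-16s type=%-7s min=%-4s max=%-7s wildcards=%s%n",
              t.tagName, t.type.getSimpleName(), t.min, t.max, t.wildCards);
        }
      }
    }
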
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
new file mode 100644
index 0000000..ad2b43b
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.common.cloud.rule.ImplicitSnitch;
+import org.apache.solr.common.util.StrUtils;
+
+import static org.apache.solr.client.solrj.cloud.autoscaling.Clause.parseString;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.perNodeSuggestions;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.FREEDISK;
+
+public class VariableBase implements Variable {
+  final Type varType;
+
+  public VariableBase(Type type) {
+    this.varType = type;
+  }
+
+  @Override
+  public void getSuggestions(Suggestion.Ctx ctx) {
+    perNodeSuggestions(ctx);
+  }
+
+  static Object getOperandAdjustedValue(Object val, Object original) {
+    if (original instanceof Clause.Condition) {
+      Clause.Condition condition = (Clause.Condition) original;
+      if (condition.computedType == null && isIntegerEquivalent(val)) {
+        if (condition.op == Operand.LESS_THAN) {
+          //replica : '<3'
+          val = val instanceof Long ?
+              (Long) val - 1 :
+              (Double) val - 1;
+        } else if (condition.op == Operand.GREATER_THAN) {
+          //replica : '>4'
+          val = val instanceof Long ?
+              (Long) val + 1 :
+              (Double) val + 1;
+        }
+      }
+    }
+    return val;
+  }
+
+  static boolean isIntegerEquivalent(Object val) {
+    if (val instanceof Number) {
+      Number number = (Number) val;
+      return Math.ceil(number.doubleValue()) == Math.floor(number.doubleValue());
+    } else if (val instanceof String) {
+      try {
+        double dval = Double.parseDouble((String) val);
+        return Math.ceil(dval) == Math.floor(dval);
+      } catch (NumberFormatException e) {
+        return false;
+      }
+    } else {
+      return false;
+    }
+
+  }
+
+  public static Type getTagType(String name) {
+    Type info = validatetypes.get(name);
+    if (info == null && name.startsWith(ImplicitSnitch.SYSPROP)) info = Type.STRING;
+    if (info == null && name.startsWith(Clause.METRICS_PREFIX)) info = Type.LAZY;
+    return info;
+  }
+
+  static Variable loadImpl(Meta meta, Type t) {
+    Class implementation = meta.implementation();
+    if (implementation == void.class) implementation = VariableBase.class;
+    try {
+      return (Variable) implementation.getConstructor(Type.class).newInstance(t);
+    } catch (Exception e) {
+      throw new RuntimeException("Unable to instantiate: " + implementation.getName(), e);
+    }
+  }
+
+  @Override
+  public int compareViolation(Violation v1, Violation v2) {
+    if (v2.replicaCountDelta == null || v1.replicaCountDelta == null) return 0;
+    if (Math.abs(v1.replicaCountDelta) == Math.abs(v2.replicaCountDelta)) return 0;
+    return Math.abs(v1.replicaCountDelta) < Math.abs(v2.replicaCountDelta) ? -1 : 1;
+  }
+
+  @Override
+  public Object validate(String name, Object val, boolean isRuleVal) {
+    if (val instanceof Clause.Condition) {
+      Clause.Condition condition = (Clause.Condition) val;
+      val = condition.op.readRuleValue(condition);
+      if (val != condition.val) return val;
+    }
+    if (name == null) name = this.varType.tagName;
+    if (varType.type == Double.class) {
+      Double num = Clause.parseDouble(name, val);
+      if (isRuleVal) {
+        if (varType.min != null)
+          if (Double.compare(num, varType.min.doubleValue()) == -1)
+            throw new RuntimeException(name + ": " + val + " must be greater than " + varType.min);
+        if (varType.max != null)
+          if (Double.compare(num, varType.max.doubleValue()) == 1)
+            throw new RuntimeException(name + ": " + val + " must be less than " + varType.max);
+      }
+      return num;
+    } else if (varType.type == Long.class) {
+      Long num = Clause.parseLong(name, val);
+      if (isRuleVal) {
+        if (varType.min != null)
+          if (num < varType.min.longValue())
+            throw new RuntimeException(name + ": " + val + " must be greater than " + varType.min);
+        if (varType.max != null)
+          if (num > varType.max.longValue())
+            throw new RuntimeException(name + ": " + val + " must be less than " + varType.max);
+      }
+      return num;
+    } else if (varType.type == String.class) {
+      if (isRuleVal && !varType.vals.isEmpty() && !varType.vals.contains(val))
+        throw new RuntimeException(name + ": " + val + " must be one of " + StrUtils.join(varType.vals, ','));
+      return val;
+    } else {
+      throw new RuntimeException("Invalid type ");
+    }
+  }
+
+  public static class TotalDiskVariable extends VariableBase {
+    public TotalDiskVariable(Type type) {
+      super(type);
+    }
+
+    @Override
+    public Object convertVal(Object val) {
+      return FREEDISK.convertVal(val);
+    }
+  }
+
+  public static class CoreIndexSizeVariable extends VariableBase {
+    public CoreIndexSizeVariable(Type type) {
+      super(type);
+    }
+
+    @Override
+    public Object convertVal(Object val) {
+      return FREEDISK.convertVal(val);
+    }
+  }
+
+  public static class LazyVariable extends VariableBase {
+    public LazyVariable(Type type) {
+      super(type);
+    }
+
+    @Override
+    public Object validate(String name, Object val, boolean isRuleVal) {
+      return parseString(val);
+    }
+
+    @Override
+    public boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
+      return op.match(parseString(val), parseString(inputVal)) == Clause.TestStatus.PASS;
+    }
+
+    @Override
+    public void getSuggestions(Suggestion.Ctx ctx) {
+      perNodeSuggestions(ctx);
+    }
+  }
+
+  public static class DiskTypeVariable extends VariableBase {
+    public DiskTypeVariable(Type type) {
+      super(type);
+    }
+
+    @Override
+    public void getSuggestions(Suggestion.Ctx ctx) {
+      perNodeSuggestions(ctx);
+    }
+
+
+  }
+
+  private static Map<String, Type> validatetypes;
+
+  static {
+    validatetypes = new HashMap<>();
+    for (Type t : Type.values())
+      validatetypes.put(t.tagName, t);
+  }
+}

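A sketch of the rule-value validation path above: when isRuleVal is true, validate() enforces the @Meta bounds and throws a RuntimeException on violation. This assumes Clause.parseLong() accepts plain numeric input, as its use in validate() suggests:

    import org.apache.solr.client.solrj.cloud.autoscaling.Variable;

    public class ValidateBoundsSketch {
      public static void main(String[] args) {
        // PORT is declared with min = 1 and max = 65535 in Variable.Type.
        Object ok = Variable.Type.PORT.validate("port", 8983, true);
        System.out.println("accepted: " + ok);
        try {
          Variable.Type.PORT.validate("port", 99999, true); // above max
        } catch (RuntimeException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }
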
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
index 7b0f0f3..2f81291 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Objects;
+import java.util.function.Function;
 
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.util.Utils;
@@ -47,6 +48,29 @@ public class Violation implements MapWriter {
     hash = ("" + coll + " " + shard + " " + node + " " + String.valueOf(tagKey) + " " + Utils.toJSONString(getClause().toMap(new HashMap<>()))).hashCode();
   }
 
+  static void collectViolatingReplicas(Ctx ctx, Row row) {
+    if (ctx.clause.tag.varType.meta.isNodeSpecificVal()) {
+      row.forEachReplica(replica -> {
+        if (ctx.clause.collection.isPass(replica.getCollection()) && ctx.clause.getShard().isPass(replica.getShard())) {
+          ctx.currentViolation.addReplica(new ReplicaInfoAndErr(replica)
+              .withDelta(ctx.clause.tag.delta(row.getVal(ctx.clause.tag.name))));
+        }
+      });
+    } else {
+      row.forEachReplica(replica -> {
+        if (ctx.clause.replica.isPass(0) && !ctx.clause.tag.isPass(row)) return;
+        if (!ctx.clause.replica.isPass(0) && ctx.clause.tag.isPass(row)) return;
+        if(!ctx.currentViolation.getClause().matchShard(replica.getShard(), ctx.currentViolation.shard)) return;
+        if (!ctx.clause.collection.isPass(ctx.currentViolation.coll) || !ctx.clause.shard.isPass(ctx.currentViolation.shard))
+          return;
+        ctx.currentViolation.addReplica(new ReplicaInfoAndErr(replica).withDelta(ctx.clause.tag.delta(row.getVal(ctx.clause.tag.name))));
+      });
+
+    }
+
+
+  }
+
   public Violation addReplica(ReplicaInfoAndErr r) {
     replicaInfoAndErrs.add(r);
     return this;
@@ -144,4 +168,29 @@ public class Violation implements MapWriter {
     });
     ew.put("clause", getClause());
   }
+
+  static class Ctx {
+    final Function<Clause.Condition, Object> evaluator;
+    String tagKey;
+    Clause clause;
+    ReplicaCount count;
+    Violation currentViolation;
+    List<Row> allRows;
+    List<Violation> allViolations = new ArrayList<>();
+
+    public Ctx(Clause clause, List<Row> allRows, Function<Clause.Condition, Object> evaluator) {
+      this.allRows = allRows;
+      this.clause = clause;
+      this.evaluator = evaluator;
+    }
+
+    public Ctx reset(String tagKey, ReplicaCount count, Violation currentViolation) {
+      this.tagKey = tagKey;
+      this.count = count;
+      this.currentViolation = currentViolation;
+      allViolations.add(currentViolation);
+      this.clause = currentViolation.getClause();
+      return this;
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVarType.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVarType.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVarType.java
deleted file mode 100644
index 989a087..0000000
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVarType.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.client.solrj.cloud.autoscaling;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Consumer;
-
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.util.Pair;
-
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
-
-/**
- * Implements the 'withCollection' variable type
- */
-public class WithCollectionVarType implements VarType {
-  @Override
-  public boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
-    Map<String, String> withCollectionMap = (Map<String, String>) inputVal;
-    if (withCollectionMap == null || withCollectionMap.isEmpty()) return true;
-
-    Set<String> uniqueColls = new HashSet<>();
-    row.forEachReplica(replicaInfo -> uniqueColls.add(replicaInfo.getCollection()));
-
-    for (Map.Entry<String, String> e : withCollectionMap.entrySet()) {
-      if (uniqueColls.contains(e.getKey()) && !uniqueColls.contains(e.getValue())) return false;
-    }
-
-    return true;
-  }
-
-  public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector, boolean strictMode) {
-    if (strictMode) {
-      // we do not want to add a replica of the 'withCollection' in strict mode
-      return;
-    }
-
-    Map<String, String> withCollectionMap = (Map<String, String>) cell.val;
-    if (withCollectionMap == null || withCollectionMap.isEmpty()) return;
-
-    Set<String> uniqueColls = new HashSet<>();
-    Row row = cell.row;
-    row.forEachReplica(replicaInfo -> uniqueColls.add(replicaInfo.getCollection()));
-
-    for (Map.Entry<String, String> e : withCollectionMap.entrySet()) {
-      if (uniqueColls.contains(e.getKey()) && !uniqueColls.contains(e.getValue())) {
-        String withCollection = e.getValue();
-
-        opCollector.accept(new Row.OperationInfo(withCollection, "shard1", row.node, cell.name, true, Replica.Type.NRT));
-      }
-    }
-  }
-
-  @Override
-  public int compareViolation(Violation v1, Violation v2) {
-    return Integer.compare(v1.getViolatingReplicas().size(), v2.getViolatingReplicas().size());
-  }
-
-  public void addViolatingReplicas(Suggestion.ViolationCtx ctx) {
-    String node = ctx.currentViolation.node;
-    for (Row row : ctx.allRows) {
-      if (node.equals(row.node)) {
-        Map<String, String> withCollectionMap = (Map<String, String>) row.getVal("withCollection");
-        if (withCollectionMap != null) {
-          row.forEachReplica(r -> {
-            String withCollection = withCollectionMap.get(r.getCollection());
-            if (withCollection != null) {
-              // test whether this row has at least 1 replica of withCollection, else there is a violation
-              Set<String> uniqueCollections = new HashSet<>();
-              row.forEachReplica(replicaInfo -> uniqueCollections.add(replicaInfo.getCollection()));
-              if (!uniqueCollections.contains(withCollection)) {
-                ctx.currentViolation.addReplica(new Violation.ReplicaInfoAndErr(r).withDelta(1.0d));
-              }
-            }
-          });
-          ctx.currentViolation.replicaCountDelta = (double) ctx.currentViolation.getViolatingReplicas().size();
-        }
-      }
-    }
-  }
-
-  @Override
-  public void getSuggestions(Suggestion.SuggestionCtx ctx) {
-    if (ctx.violation.getViolatingReplicas().isEmpty()) return;
-
-    Map<String, Object> nodeValues = ctx.session.nodeStateProvider.getNodeValues(ctx.violation.node, Collections.singleton("withCollection"));
-    Map<String, String> withCollectionsMap = (Map<String, String>) nodeValues.get("withCollection");
-    if (withCollectionsMap == null) return;
-
-    Set<String> uniqueCollections = new HashSet<>();
-    for (Violation.ReplicaInfoAndErr replicaInfoAndErr : ctx.violation.getViolatingReplicas()) {
-      uniqueCollections.add(replicaInfoAndErr.replicaInfo.getCollection());
-    }
-
-    collectionLoop:
-    for (String collection : uniqueCollections) {
-      String withCollection = withCollectionsMap.get(collection);
-      if (withCollection == null) continue;
-
-      // can we find a node from which we can move a replica of the `withCollection`
-      // without creating another violation?
-      for (Row row : ctx.session.matrix) {
-        if (ctx.violation.node.equals(row.node))  continue; // filter the violating node
-
-        Set<String> hostedCollections = new HashSet<>();
-        row.forEachReplica(replicaInfo -> hostedCollections.add(replicaInfo.getCollection()));
-
-        if (hostedCollections.contains(withCollection) && !hostedCollections.contains(collection))  {
-          // find the candidate replicas that we can move
-          List<ReplicaInfo> movableReplicas = new ArrayList<>();
-          row.forEachReplica(replicaInfo -> {
-            if (replicaInfo.getCollection().equals(withCollection)) {
-              movableReplicas.add(replicaInfo);
-            }
-          });
-
-          for (ReplicaInfo toMove : movableReplicas) {
-            // candidate source node for a move replica operation
-            Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
-                .forceOperation(true)
-                .hint(Suggester.Hint.COLL_SHARD, new Pair<>(withCollection, "shard1"))
-                .hint(Suggester.Hint.SRC_NODE, row.node)
-                .hint(Suggester.Hint.REPLICA, toMove.getName())
-                .hint(Suggester.Hint.TARGET_NODE, ctx.violation.node);
-            if (ctx.addSuggestion(suggester) != null)
-              continue collectionLoop; // one suggestion is enough for this collection
-          }
-        }
-      }
-
-      // we could not find a valid move, so we suggest adding a replica
-      Suggester suggester = ctx.session.getSuggester(ADDREPLICA)
-          .forceOperation(true)
-          .hint(Suggester.Hint.COLL_SHARD, new Pair<>(withCollection, "shard1"))
-          .hint(Suggester.Hint.TARGET_NODE, ctx.violation.node);
-      ctx.addSuggestion(suggester);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java
new file mode 100644
index 0000000..b295aee
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Consumer;
+
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.util.Pair;
+
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
+
+/**
+ * Implements the 'withCollection' variable type
+ */
+public class WithCollectionVariable extends VariableBase {
+
+  public WithCollectionVariable(Type type) {
+    super(type);
+  }
+
+  @Override
+  public boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
+    Map<String, String> withCollectionMap = (Map<String, String>) inputVal;
+    if (withCollectionMap == null || withCollectionMap.isEmpty()) return true;
+
+    Set<String> uniqueColls = new HashSet<>();
+    row.forEachReplica(replicaInfo -> uniqueColls.add(replicaInfo.getCollection()));
+
+    for (Map.Entry<String, String> e : withCollectionMap.entrySet()) {
+      if (uniqueColls.contains(e.getKey()) && !uniqueColls.contains(e.getValue())) return false;
+    }
+
+    return true;
+  }
+
+  public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector, boolean strictMode) {
+    if (strictMode) {
+      // we do not want to add a replica of the 'withCollection' in strict mode
+      return;
+    }
+
+    Map<String, String> withCollectionMap = (Map<String, String>) cell.val;
+    if (withCollectionMap == null || withCollectionMap.isEmpty()) return;
+
+    Set<String> uniqueColls = new HashSet<>();
+    Row row = cell.row;
+    row.forEachReplica(replicaInfo -> uniqueColls.add(replicaInfo.getCollection()));
+
+    for (Map.Entry<String, String> e : withCollectionMap.entrySet()) {
+      if (uniqueColls.contains(e.getKey()) && !uniqueColls.contains(e.getValue())) {
+        String withCollection = e.getValue();
+
+        opCollector.accept(new Row.OperationInfo(withCollection, "shard1", row.node, cell.name, true, Replica.Type.NRT));
+      }
+    }
+  }
+
+  @Override
+  public int compareViolation(Violation v1, Violation v2) {
+    return Integer.compare(v1.getViolatingReplicas().size(), v2.getViolatingReplicas().size());
+  }
+
+  public void addViolatingReplicas(Violation.Ctx ctx) {
+    String node = ctx.currentViolation.node;
+    for (Row row : ctx.allRows) {
+      if (node.equals(row.node)) {
+        Map<String, String> withCollectionMap = (Map<String, String>) row.getVal("withCollection");
+        if (withCollectionMap != null) {
+          row.forEachReplica(r -> {
+            String withCollection = withCollectionMap.get(r.getCollection());
+            if (withCollection != null) {
+              // test whether this row has at least 1 replica of withCollection, else there is a violation
+              Set<String> uniqueCollections = new HashSet<>();
+              row.forEachReplica(replicaInfo -> uniqueCollections.add(replicaInfo.getCollection()));
+              if (!uniqueCollections.contains(withCollection)) {
+                ctx.currentViolation.addReplica(new Violation.ReplicaInfoAndErr(r).withDelta(1.0d));
+              }
+            }
+          });
+          ctx.currentViolation.replicaCountDelta = (double) ctx.currentViolation.getViolatingReplicas().size();
+        }
+      }
+    }
+  }
+
+  @Override
+  public void getSuggestions(Suggestion.Ctx ctx) {
+    if (ctx.violation.getViolatingReplicas().isEmpty()) return;
+
+    Map<String, Object> nodeValues = ctx.session.nodeStateProvider.getNodeValues(ctx.violation.node, Collections.singleton("withCollection"));
+    Map<String, String> withCollectionsMap = (Map<String, String>) nodeValues.get("withCollection");
+    if (withCollectionsMap == null) return;
+
+    Set<String> uniqueCollections = new HashSet<>();
+    for (Violation.ReplicaInfoAndErr replicaInfoAndErr : ctx.violation.getViolatingReplicas()) {
+      uniqueCollections.add(replicaInfoAndErr.replicaInfo.getCollection());
+    }
+
+    collectionLoop:
+    for (String collection : uniqueCollections) {
+      String withCollection = withCollectionsMap.get(collection);
+      if (withCollection == null) continue;
+
+      // can we find a node from which we can move a replica of the `withCollection`
+      // without creating another violation?
+      for (Row row : ctx.session.matrix) {
+        if (ctx.violation.node.equals(row.node))  continue; // filter the violating node
+
+        Set<String> hostedCollections = new HashSet<>();
+        row.forEachReplica(replicaInfo -> hostedCollections.add(replicaInfo.getCollection()));
+
+        if (hostedCollections.contains(withCollection) && !hostedCollections.contains(collection))  {
+          // find the candidate replicas that we can move
+          List<ReplicaInfo> movableReplicas = new ArrayList<>();
+          row.forEachReplica(replicaInfo -> {
+            if (replicaInfo.getCollection().equals(withCollection)) {
+              movableReplicas.add(replicaInfo);
+            }
+          });
+
+          for (ReplicaInfo toMove : movableReplicas) {
+            // candidate source node for a move replica operation
+            Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
+                .forceOperation(true)
+                .hint(Suggester.Hint.COLL_SHARD, new Pair<>(withCollection, "shard1"))
+                .hint(Suggester.Hint.SRC_NODE, row.node)
+                .hint(Suggester.Hint.REPLICA, toMove.getName())
+                .hint(Suggester.Hint.TARGET_NODE, ctx.violation.node);
+            if (ctx.addSuggestion(suggester) != null)
+              continue collectionLoop; // one suggestion is enough for this collection
+          }
+        }
+      }
+
+      // we could not find a valid move, so we suggest adding a replica
+      Suggester suggester = ctx.session.getSuggester(ADDREPLICA)
+          .forceOperation(true)
+          .hint(Suggester.Hint.COLL_SHARD, new Pair<>(withCollection, "shard1"))
+          .hint(Suggester.Hint.TARGET_NODE, ctx.violation.node);
+      ctx.addSuggestion(suggester);
+    }
+  }
+}

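To make the 'withCollection' semantics concrete: the node-level value maps each collection to a companion collection that must be co-located with it, and a node violates the rule when it hosts the key but not the value. A self-contained restatement of the match() check above, on hypothetical data:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class WithCollectionSketch {
      public static void main(String[] args) {
        // withCollection value on a node: tech_products must be co-located with users.
        Map<String, String> withCollectionMap = Collections.singletonMap("tech_products", "users");
        // Collections actually hosted on this node (illustrative).
        Set<String> hosted = new HashSet<>(Arrays.asList("tech_products"));

        boolean pass = true;
        for (Map.Entry<String, String> e : withCollectionMap.entrySet()) {
          // Violation: the node hosts the dependent collection but not its companion.
          if (hosted.contains(e.getKey()) && !hosted.contains(e.getValue())) pass = false;
        }
        System.out.println(pass ? "PASS" : "VIOLATION"); // prints VIOLATION
      }
    }
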
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
index 2015b52..83fb25a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
@@ -36,7 +36,8 @@ import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.cloud.NodeStateProvider;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.Row;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
+import org.apache.solr.client.solrj.cloud.autoscaling.VariableBase;
 import org.apache.solr.client.solrj.request.GenericSolrRequest;
 import org.apache.solr.client.solrj.response.SimpleSolrResponse;
 import org.apache.solr.common.MapWriter;
@@ -59,9 +60,9 @@ import org.slf4j.LoggerFactory;
 
 import static java.util.Collections.emptyMap;
 import static org.apache.solr.client.solrj.cloud.autoscaling.Clause.METRICS_PREFIX;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.FREEDISK;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.TOTALDISK;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.WITH_COLLECTION;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.FREEDISK;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.TOTALDISK;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.WITH_COLLECTION;
 
 /**
  *
@@ -153,7 +154,7 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
         for (String key : keys) {
           if (r.getVariables().containsKey(key)) continue;// it's already collected
           String perReplicaMetricsKey = "solr.core." + r.getCollection() + "." + r.getShard() + "." + Utils.parseMetricsReplicaName(r.getCollection(), r.getCore()) + ":";
-          Suggestion.ConditionType tagType = Suggestion.getTagType(key);
+          Type tagType = VariableBase.getTagType(key);
           String perReplicaValue = key;
           if (tagType != null) {
             perReplicaValue = tagType.metricsAttribute;
@@ -168,7 +169,7 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
         Map<String, Object> tagValues = fetchReplicaMetrics(node, metricsKeyVsTagReplica);
         tagValues.forEach((k, o) -> {
           Pair<String, ReplicaInfo> p = metricsKeyVsTagReplica.get(k);
-          Suggestion.ConditionType validator = Suggestion.getTagType(p.first());
+          Type validator = VariableBase.getTagType(p.first());
           if (validator != null) o = validator.convertVal(o);
           if (p.second() != null) p.second().getVariables().put(p.first(), o);
         });

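The key change above swaps Suggestion.ConditionType for the new Variable.Type/VariableBase pair when resolving a policy tag to its per-replica metrics attribute. A rough illustration of the metrics-key construction, with hypothetical collection, shard, and replica names:

    // Hypothetical values; the key format matches perReplicaMetricsKey above.
    String collection = "coll_A";
    String shard = "shard1";
    String replica = "replica_n1";   // as produced by Utils.parseMetricsReplicaName(...)
    String prefix = "solr.core." + collection + "." + shard + "." + replica + ":";

    Variable.Type tagType = VariableBase.getTagType("freedisk");  // resolves to Variable.Type.FREEDISK
    // fall back to the raw tag name when no Variable.Type is registered for it
    String metricsKey = prefix + (tagType != null ? tagType.metricsAttribute : "freedisk");
    // raw metric values are then normalized through the type, as in the hunk above:
    // Object value = tagType.convertVal(rawValue);
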
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
index 16addd4..16dfdcd 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
@@ -43,7 +43,6 @@ import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.DistributedQueueFactory;
 import org.apache.solr.client.solrj.cloud.NodeStateProvider;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.cloud.autoscaling.Clause.RangeVal;
 import org.apache.solr.client.solrj.cloud.autoscaling.Suggester.Hint;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.client.solrj.impl.SolrClientNodeStateProvider;
@@ -72,9 +71,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.CORES;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.FREEDISK;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.REPLICA;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.CORES;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.FREEDISK;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.REPLICA;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
 
@@ -3413,18 +3412,18 @@ public void testUtilizeNodeFailure2() throws Exception {
         "        {'node':'solr-27:8983_solr'}]}]}";
 
     List l = (List) ((Map) Utils.fromJSONString(rowsData)).get("sortedNodes");
-    List<Suggestion.ConditionType> params = new ArrayList<>();
+    List<Variable.Type> params = new ArrayList<>();
     params.add(CORES);
-    params.add(Suggestion.ConditionType.FREEDISK);
-    params.add(Suggestion.ConditionType.SYSLOADAVG);
-    params.add(Suggestion.ConditionType.NODE);
+    params.add(Variable.Type.FREEDISK);
+    params.add(Variable.Type.SYSLOADAVG);
+    params.add(Variable.Type.NODE);
     List<Row> rows = new ArrayList<>();
     for (Object o : l) {
       Map m = (Map) o;
       Cell[] c = new Cell[params.size()];
       List attrs = (List) m.get("attributes");
       for (int i = 0; i < params.size(); i++) {
-        Suggestion.ConditionType param = params.get(i);
+        Variable.Type param = params.get(i);
         for (Object attr : attrs) {
           Object o1 = ((Map) attr).get(param.tagName);
           if (o1 != null) {

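This test now drives row construction off Variable.Type directly; each type carries the tag name used to look up the matching node attribute. A tiny sketch of that lookup, using the same constants (assumes java.util.Arrays is imported):

    // Each Variable.Type exposes the tag name it matches in node attributes.
    for (Variable.Type p : Arrays.asList(Variable.Type.CORES, Variable.Type.FREEDISK,
        Variable.Type.SYSLOADAVG, Variable.Type.NODE)) {
      System.out.println(p + " -> tagName=" + p.tagName);  // e.g. FREEDISK -> freedisk
    }
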
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
index 9c5528a..678600f 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
@@ -40,7 +40,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.util.Collections.emptyMap;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.CORES;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.CORES;
 
 public class TestPolicy2 extends SolrTestCaseJ4 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());


[25/48] lucene-solr:jira/http2: SOLR-12509: Improve SplitShardCmd performance and reliability.

Posted by da...@apache.org.
SOLR-12509: Improve SplitShardCmd performance and reliability.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1133bf98
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1133bf98
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1133bf98

Branch: refs/heads/jira/http2
Commit: 1133bf98a5fd075173efecfb75a51493fceb62b3
Parents: c6e0c28
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Wed Aug 1 14:39:37 2018 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Wed Aug 1 16:30:59 2018 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   4 +
 .../cloud/api/collections/SplitShardCmd.java    | 144 ++++--
 .../solr/cloud/overseer/ReplicaMutator.java     |  47 +-
 .../solr/handler/admin/CollectionsHandler.java  |   6 +-
 .../org/apache/solr/handler/admin/SplitOp.java  |  24 +-
 .../solr/update/DirectUpdateHandler2.java       |   5 +-
 .../apache/solr/update/SolrIndexSplitter.java   | 464 +++++++++++++++++--
 .../apache/solr/update/SplitIndexCommand.java   |  22 +-
 .../solr/cloud/ChaosMonkeyShardSplitTest.java   |   2 +-
 .../cloud/api/collections/ShardSplitTest.java   |  73 ++-
 .../solr/update/SolrIndexSplitterTest.java      | 112 ++++-
 solr/solr-ref-guide/src/collections-api.adoc    |  20 +-
 .../solrj/request/CollectionAdminRequest.java   |  11 +
 .../solr/common/params/CommonAdminParams.java   |   2 +
 .../solr/common/params/CoreAdminParams.java     |   2 +-
 15 files changed, 773 insertions(+), 165 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 37dd5a7..49fc7fe 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -205,6 +205,10 @@ Optimizations
 * SOLR-12305: When a replica is applying updates, some kind of updates can skip buffering for faster recovery.
   (Cao Manh Dat)
 
+* SOLR-12509: Improve SplitShardCmd performance and reliability. A new method of splitting has been
+  introduced (splitMethod=link), which uses hard-linking of index files when possible, resulting in
+  significant speedups and reduced CPU / IO load on the shard leader. (ab)
+
 Other Changes
 ----------------------
 

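Both new request parameters are plumbed through the Collections API (the CollectionsHandler hunk below whitelists splitMethod and timing for SPLITSHARD). A minimal SolrJ sketch of requesting a hard-link split with timings, using raw parameters rather than assuming the exact CollectionAdminRequest setter added in this commit; the base URL, collection, and shard are hypothetical:

    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.GenericSolrRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class SplitShardLinkDemo {
      public static void main(String[] args) throws Exception {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("action", "SPLITSHARD");
        params.set("collection", "coll_A");  // hypothetical
        params.set("shard", "shard1");       // hypothetical
        params.set("splitMethod", "link");   // new: split by hard-linking index files
        params.set("timing", "true");        // new: include per-stage timings in the response
        try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
          System.out.println(client.request(
              new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/collections", params)));
        }
      }
    }
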
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
index b5408f8..00488a3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -51,11 +52,14 @@ import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.rule.ImplicitSnitch;
 import org.apache.solr.common.params.CommonAdminParams;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.handler.component.ShardHandler;
+import org.apache.solr.update.SolrIndexSplitter;
+import org.apache.solr.util.RTimerTree;
 import org.apache.solr.util.TestInjection;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.data.Stat;
@@ -87,13 +91,23 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
 
   public boolean split(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
     boolean waitForFinalState = message.getBool(CommonAdminParams.WAIT_FOR_FINAL_STATE, false);
+    String methodStr = message.getStr(CommonAdminParams.SPLIT_METHOD, SolrIndexSplitter.SplitMethod.REWRITE.toLower());
+    SolrIndexSplitter.SplitMethod splitMethod = SolrIndexSplitter.SplitMethod.get(methodStr);
+    if (splitMethod == null) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown value of '" + CommonAdminParams.SPLIT_METHOD +
+          "': " + methodStr);
+    }
+    boolean withTiming = message.getBool(CommonParams.TIMING, false);
+
     String collectionName = message.getStr(CoreAdminParams.COLLECTION);
 
-    log.info("Split shard invoked");
+    log.debug("Split shard invoked: {}", message);
     ZkStateReader zkStateReader = ocmh.zkStateReader;
     zkStateReader.forceUpdateCollection(collectionName);
     AtomicReference<String> slice = new AtomicReference<>();
     slice.set(message.getStr(ZkStateReader.SHARD_ID_PROP));
+    Set<String> offlineSlices = new HashSet<>();
+    RTimerTree timings = new RTimerTree();
 
     String splitKey = message.getStr("split.key");
     DocCollection collection = clusterState.getCollection(collectionName);
@@ -101,6 +115,9 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     PolicyHelper.SessionWrapper sessionWrapper = null;
 
     Slice parentSlice = getParentSlice(clusterState, collectionName, slice, splitKey);
+    if (parentSlice.getState() != Slice.State.ACTIVE) {
+      throw new SolrException(SolrException.ErrorCode.INVALID_STATE, "Parent slice is not active: " + parentSlice.getState());
+    }
 
     // find the leader for the shard
     Replica parentShardLeader = null;
@@ -111,7 +128,9 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted.");
     }
 
+    RTimerTree t = timings.sub("checkDiskSpace");
     checkDiskSpace(collectionName, slice.get(), parentShardLeader);
+    t.stop();
 
     // let's record the ephemeralOwner of the parent leader node
     Stat leaderZnodeStat = zkStateReader.getZkClient().exists(ZkStateReader.LIVE_NODES_ZKNODE + "/" + parentShardLeader.getNodeName(), null, true);
@@ -142,20 +161,22 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     });
     int repFactor = numNrt.get() + numTlog.get() + numPull.get();
 
-    // type of the first subreplica will be the same as leader
-    boolean firstNrtReplica = parentShardLeader.getType() == Replica.Type.NRT;
-    // verify that we indeed have the right number of correct replica types
-    if ((firstNrtReplica && numNrt.get() < 1) || (!firstNrtReplica && numTlog.get() < 1)) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "aborting split - inconsistent replica types in collection " + collectionName +
-          ": nrt=" + numNrt.get() + ", tlog=" + numTlog.get() + ", pull=" + numPull.get() + ", shard leader type is " +
-      parentShardLeader.getType());
-    }
-
-    List<Map<String, Object>> replicas = new ArrayList<>((repFactor - 1) * 2);
+    boolean success = false;
+    try {
+      // type of the first subreplica will be the same as leader
+      boolean firstNrtReplica = parentShardLeader.getType() == Replica.Type.NRT;
+      // verify that we indeed have the right number of correct replica types
+      if ((firstNrtReplica && numNrt.get() < 1) || (!firstNrtReplica && numTlog.get() < 1)) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "aborting split - inconsistent replica types in collection " + collectionName +
+            ": nrt=" + numNrt.get() + ", tlog=" + numTlog.get() + ", pull=" + numPull.get() + ", shard leader type is " +
+            parentShardLeader.getType());
+      }
 
-    String rangesStr = fillRanges(ocmh.cloudManager, message, collection, parentSlice, subRanges, subSlices, subShardNames, firstNrtReplica);
+      List<Map<String, Object>> replicas = new ArrayList<>((repFactor - 1) * 2);
 
-    try {
+      t = timings.sub("fillRanges");
+      String rangesStr = fillRanges(ocmh.cloudManager, message, collection, parentSlice, subRanges, subSlices, subShardNames, firstNrtReplica);
+      t.stop();
 
       boolean oldShardsDeleted = false;
       for (String subSlice : subSlices) {
@@ -196,12 +217,13 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       Map<String, String> requestMap = new HashMap<>();
       String nodeName = parentShardLeader.getNodeName();
 
+      t = timings.sub("createSubSlicesAndLeadersInState");
       for (int i = 0; i < subRanges.size(); i++) {
         String subSlice = subSlices.get(i);
         String subShardName = subShardNames.get(i);
         DocRouter.Range subRange = subRanges.get(i);
 
-        log.info("Creating slice " + subSlice + " of collection " + collectionName + " on " + nodeName);
+        log.debug("Creating slice " + subSlice + " of collection " + collectionName + " on " + nodeName);
 
         Map<String, Object> propMap = new HashMap<>();
         propMap.put(Overseer.QUEUE_OPERATION, CREATESHARD.toLower());
@@ -210,7 +232,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         propMap.put(ZkStateReader.SHARD_RANGE_PROP, subRange.toString());
         propMap.put(ZkStateReader.SHARD_STATE_PROP, Slice.State.CONSTRUCTION.toString());
         propMap.put(ZkStateReader.SHARD_PARENT_PROP, parentSlice.getName());
-        propMap.put("shard_parent_node", parentShardLeader.getNodeName());
+        propMap.put("shard_parent_node", nodeName);
         propMap.put("shard_parent_zk_session", leaderZnodeStat.getEphemeralOwner());
         DistributedQueue inQueue = Overseer.getStateUpdateQueue(zkStateReader.getZkClient());
         inQueue.offer(Utils.toJSON(new ZkNodeProps(propMap)));
@@ -221,7 +243,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         // refresh cluster state
         clusterState = zkStateReader.getClusterState();
 
-        log.info("Adding replica " + subShardName + " as part of slice " + subSlice + " of collection " + collectionName
+        log.debug("Adding first replica " + subShardName + " as part of slice " + subSlice + " of collection " + collectionName
             + " on " + nodeName);
         propMap = new HashMap<>();
         propMap.put(Overseer.QUEUE_OPERATION, ADDREPLICA.toLower());
@@ -248,9 +270,11 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
 
       ocmh.processResponses(results, shardHandler, true, "SPLITSHARD failed to create subshard leaders", asyncId, requestMap);
 
+      t.stop();
+      t = timings.sub("waitForSubSliceLeadersAlive");
       for (String subShardName : subShardNames) {
         // wait for parent leader to acknowledge the sub-shard core
-        log.info("Asking parent leader to wait for: " + subShardName + " to be alive on: " + nodeName);
+        log.debug("Asking parent leader to wait for: " + subShardName + " to be alive on: " + nodeName);
         String coreNodeName = ocmh.waitForCoreNodeName(collectionName, nodeName, subShardName);
         CoreAdminRequest.WaitForState cmd = new CoreAdminRequest.WaitForState();
         cmd.setCoreName(subShardName);
@@ -266,8 +290,9 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
 
       ocmh.processResponses(results, shardHandler, true, "SPLITSHARD timed out waiting for subshard leaders to come up",
           asyncId, requestMap);
+      t.stop();
 
-      log.info("Successfully created all sub-shards for collection " + collectionName + " parent shard: " + slice
+      log.debug("Successfully created all sub-shards for collection " + collectionName + " parent shard: " + slice
           + " on: " + parentShardLeader);
 
       log.info("Splitting shard " + parentShardLeader.getName() + " as part of slice " + slice + " of collection "
@@ -275,6 +300,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
 
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.SPLIT.toString());
+      params.set(CommonAdminParams.SPLIT_METHOD, splitMethod.toLower());
       params.set(CoreAdminParams.CORE, parentShardLeader.getStr("core"));
       for (int i = 0; i < subShardNames.size(); i++) {
         String subShardName = subShardNames.get(i);
@@ -282,18 +308,22 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       }
       params.set(CoreAdminParams.RANGES, rangesStr);
 
+      t = timings.sub("splitParentCore");
+
       ocmh.sendShardRequest(parentShardLeader.getNodeName(), params, shardHandler, asyncId, requestMap);
 
       ocmh.processResponses(results, shardHandler, true, "SPLITSHARD failed to invoke SPLIT core admin command", asyncId,
           requestMap);
+      t.stop();
 
-      log.info("Index on shard: " + nodeName + " split into two successfully");
+      log.debug("Index on shard: " + nodeName + " split into two successfully");
 
+      t = timings.sub("applyBufferedUpdates");
       // apply buffered updates on sub-shards
       for (int i = 0; i < subShardNames.size(); i++) {
         String subShardName = subShardNames.get(i);
 
-        log.info("Applying buffered updates on : " + subShardName);
+        log.debug("Applying buffered updates on : " + subShardName);
 
         params = new ModifiableSolrParams();
         params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.REQUESTAPPLYUPDATES.toString());
@@ -304,8 +334,9 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
 
       ocmh.processResponses(results, shardHandler, true, "SPLITSHARD failed while asking sub shard leaders" +
           " to apply buffered updates", asyncId, requestMap);
+      t.stop();
 
-      log.info("Successfully applied buffered updates on : " + subShardNames);
+      log.debug("Successfully applied buffered updates on : " + subShardNames);
 
       // Replica creation for the new Slices
       // replica placement is controlled by the autoscaling policy framework
@@ -329,6 +360,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         numTlog.decrementAndGet();
       }
 
+      t = timings.sub("identifyNodesForReplicas");
       List<ReplicaPosition> replicaPositions = Assign.identifyNodes(ocmh.cloudManager,
           clusterState,
           new ArrayList<>(clusterState.getLiveNodes()),
@@ -336,13 +368,15 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
           new ZkNodeProps(collection.getProperties()),
           subSlices, numNrt.get(), numTlog.get(), numPull.get());
       sessionWrapper = PolicyHelper.getLastSessionWrapper(true);
+      t.stop();
 
+      t = timings.sub("createReplicaPlaceholders");
       for (ReplicaPosition replicaPosition : replicaPositions) {
         String sliceName = replicaPosition.shard;
         String subShardNodeName = replicaPosition.node;
         String solrCoreName = Assign.buildSolrCoreName(collectionName, sliceName, replicaPosition.type, replicaPosition.index);
 
-        log.info("Creating replica shard " + solrCoreName + " as part of slice " + sliceName + " of collection "
+        log.debug("Creating replica shard " + solrCoreName + " as part of slice " + sliceName + " of collection "
             + collectionName + " on " + subShardNodeName);
 
         // we first create all replicas in DOWN state without actually creating their cores in order to
@@ -384,7 +418,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
 
         replicas.add(propMap);
       }
-
+      t.stop();
       assert TestInjection.injectSplitFailureBeforeReplicaCreation();
 
       long ephemeralOwner = leaderZnodeStat.getEphemeralOwner();
@@ -414,12 +448,12 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       }
 
       // we must set the slice state into recovery before actually creating the replica cores
-      // this ensures that the logic inside Overseer to update sub-shard state to 'active'
+      // this ensures that the logic inside ReplicaMutator to update sub-shard state to 'active'
       // always gets a chance to execute. See SOLR-7673
 
       if (repFactor == 1) {
         // switch sub shard states to 'active'
-        log.info("Replication factor is 1 so switching shard states");
+        log.debug("Replication factor is 1 so switching shard states");
         DistributedQueue inQueue = Overseer.getStateUpdateQueue(zkStateReader.getZkClient());
         Map<String, Object> propMap = new HashMap<>();
         propMap.put(Overseer.QUEUE_OPERATION, OverseerAction.UPDATESHARDSTATE.toLower());
@@ -431,7 +465,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         ZkNodeProps m = new ZkNodeProps(propMap);
         inQueue.offer(Utils.toJSON(m));
       } else {
-        log.info("Requesting shard state be set to 'recovery'");
+        log.debug("Requesting shard state be set to 'recovery'");
         DistributedQueue inQueue = Overseer.getStateUpdateQueue(zkStateReader.getZkClient());
         Map<String, Object> propMap = new HashMap<>();
         propMap.put(Overseer.QUEUE_OPERATION, OverseerAction.UPDATESHARDSTATE.toLower());
@@ -443,6 +477,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         inQueue.offer(Utils.toJSON(m));
       }
 
+      t = timings.sub("createCoresForReplicas");
       // now actually create replica cores on sub shard nodes
       for (Map<String, Object> replica : replicas) {
         ocmh.addReplica(clusterState, new ZkNodeProps(replica), results, null);
@@ -451,20 +486,28 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       assert TestInjection.injectSplitFailureAfterReplicaCreation();
 
       ocmh.processResponses(results, shardHandler, true, "SPLITSHARD failed to create subshard replicas", asyncId, requestMap);
+      t.stop();
 
       log.info("Successfully created all replica shards for all sub-slices " + subSlices);
 
+      t = timings.sub("finalCommit");
       ocmh.commit(results, slice.get(), parentShardLeader);
-
+      t.stop();
+      if (withTiming) {
+        results.add(CommonParams.TIMING, timings.asNamedList());
+      }
+      success = true;
       return true;
     } catch (SolrException e) {
-      cleanupAfterFailure(zkStateReader, collectionName, parentSlice.getName(), subSlices);
       throw e;
     } catch (Exception e) {
       log.error("Error executing split operation for collection: " + collectionName + " parent shard: " + slice, e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, null, e);
     } finally {
       if (sessionWrapper != null) sessionWrapper.release();
+      if (!success) {
+        cleanupAfterFailure(zkStateReader, collectionName, parentSlice.getName(), subSlices, offlineSlices);
+      }
     }
   }
 
@@ -505,13 +548,14 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     }
   }
 
-  private void cleanupAfterFailure(ZkStateReader zkStateReader, String collectionName, String parentShard, List<String> subSlices) {
-    log.debug("- cleanup after failed split of " + collectionName + "/" + parentShard);
+  private void cleanupAfterFailure(ZkStateReader zkStateReader, String collectionName, String parentShard,
+                                   List<String> subSlices, Set<String> offlineSlices) {
+    log.info("Cleaning up after a failed split of " + collectionName + "/" + parentShard);
     // get the latest state
     try {
       zkStateReader.forceUpdateCollection(collectionName);
     } catch (KeeperException | InterruptedException e) {
-      log.warn("Cleanup after failed split of " + collectionName + "/" + parentShard + ": (force update collection)", e);
+      log.warn("Cleanup failed after failed split of " + collectionName + "/" + parentShard + ": (force update collection)", e);
       return;
     }
     ClusterState clusterState = zkStateReader.getClusterState();
@@ -524,7 +568,8 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     // set already created sub shards states to CONSTRUCTION - this prevents them
     // from entering into RECOVERY or ACTIVE (SOLR-9455)
     DistributedQueue inQueue = Overseer.getStateUpdateQueue(zkStateReader.getZkClient());
-    Map<String, Object> propMap = new HashMap<>();
+    final Map<String, Object> propMap = new HashMap<>();
+    boolean sendUpdateState = false;
     propMap.put(Overseer.QUEUE_OPERATION, OverseerAction.UPDATESHARDSTATE.toLower());
     propMap.put(ZkStateReader.COLLECTION_PROP, collectionName);
     for (Slice s : coll.getSlices()) {
@@ -532,20 +577,29 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         continue;
       }
       propMap.put(s.getName(), Slice.State.CONSTRUCTION.toString());
+      sendUpdateState = true;
     }
 
     // if parent is inactive activate it again
     Slice parentSlice = coll.getSlice(parentShard);
     if (parentSlice.getState() == Slice.State.INACTIVE) {
+      sendUpdateState = true;
       propMap.put(parentShard, Slice.State.ACTIVE.toString());
     }
+    // plus any other previously deactivated slices
+    for (String sliceName : offlineSlices) {
+      propMap.put(sliceName, Slice.State.ACTIVE.toString());
+      sendUpdateState = true;
+    }
 
-    try {
-      ZkNodeProps m = new ZkNodeProps(propMap);
-      inQueue.offer(Utils.toJSON(m));
-    } catch (Exception e) {
-      // don't give up yet - just log the error, we may still be able to clean up
-      log.warn("Cleanup after failed split of " + collectionName + "/" + parentShard + ": (slice state changes)", e);
+    if (sendUpdateState) {
+      try {
+        ZkNodeProps m = new ZkNodeProps(propMap);
+        inQueue.offer(Utils.toJSON(m));
+      } catch (Exception e) {
+        // don't give up yet - just log the error, we may still be able to clean up
+        log.warn("Cleanup failed after failed split of " + collectionName + "/" + parentShard + ": (slice state changes)", e);
+      }
     }
 
     // delete existing subShards
@@ -554,16 +608,16 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       if (s == null) {
         continue;
       }
-      log.info("Sub-shard: {} already exists therefore requesting its deletion", subSlice);
-      propMap = new HashMap<>();
-      propMap.put(Overseer.QUEUE_OPERATION, "deleteshard");
-      propMap.put(COLLECTION_PROP, collectionName);
-      propMap.put(SHARD_ID_PROP, subSlice);
-      ZkNodeProps m = new ZkNodeProps(propMap);
+      log.debug("- sub-shard: {} exists therefore requesting its deletion", subSlice);
+      HashMap<String, Object> props = new HashMap<>();
+      props.put(Overseer.QUEUE_OPERATION, "deleteshard");
+      props.put(COLLECTION_PROP, collectionName);
+      props.put(SHARD_ID_PROP, subSlice);
+      ZkNodeProps m = new ZkNodeProps(props);
       try {
         ocmh.commandMap.get(DELETESHARD).call(clusterState, m, new NamedList());
       } catch (Exception e) {
-        log.warn("Cleanup after failed split of " + collectionName + "/" + parentShard + ": (deleting existing sub shard " + subSlice + ")", e);
+        log.warn("Cleanup failed after failed split of " + collectionName + "/" + parentShard + ": (deleting existing sub shard " + subSlice + ")", e);
       }
     }
   }

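Most of the new lines in SplitShardCmd are RTimerTree instrumentation: each phase runs between a named sub() and a stop(), and the finished tree is attached to the response when TIMING is requested. A condensed sketch of the pattern, with illustrative phase names:

    import org.apache.solr.common.params.CommonParams;
    import org.apache.solr.common.util.NamedList;
    import org.apache.solr.util.RTimerTree;

    void runTimedPhases(NamedList<Object> results, boolean withTiming) {
      RTimerTree timings = new RTimerTree();

      RTimerTree t = timings.sub("phaseOne");  // sub() creates and starts a named child timer
      // ... phase one work ...
      t.stop();

      t = timings.sub("phaseTwo");             // illustrative phase name
      // ... phase two work ...
      t.stop();

      if (withTiming) {
        results.add(CommonParams.TIMING, timings.asNamedList());
      }
    }
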
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
index f897072..34843c1 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
@@ -25,6 +25,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
@@ -52,12 +53,12 @@ import static org.apache.solr.common.params.CommonParams.NAME;
 public class ReplicaMutator {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  protected final SolrCloudManager dataProvider;
+  protected final SolrCloudManager cloudManager;
   protected final DistribStateManager stateManager;
 
-  public ReplicaMutator(SolrCloudManager dataProvider) {
-    this.dataProvider = dataProvider;
-    this.stateManager = dataProvider.getDistribStateManager();
+  public ReplicaMutator(SolrCloudManager cloudManager) {
+    this.cloudManager = cloudManager;
+    this.stateManager = cloudManager.getDistribStateManager();
   }
 
   protected Replica setProperty(Replica replica, String key, String value) {
@@ -96,11 +97,11 @@ public class ReplicaMutator {
   }
 
   public ZkWriteCommand addReplicaProperty(ClusterState clusterState, ZkNodeProps message) {
-    if (checkKeyExistence(message, ZkStateReader.COLLECTION_PROP) == false ||
-        checkKeyExistence(message, ZkStateReader.SHARD_ID_PROP) == false ||
-        checkKeyExistence(message, ZkStateReader.REPLICA_PROP) == false ||
-        checkKeyExistence(message, ZkStateReader.PROPERTY_PROP) == false ||
-        checkKeyExistence(message, ZkStateReader.PROPERTY_VALUE_PROP) == false) {
+    if (!checkKeyExistence(message, ZkStateReader.COLLECTION_PROP) ||
+        !checkKeyExistence(message, ZkStateReader.SHARD_ID_PROP) ||
+        !checkKeyExistence(message, ZkStateReader.REPLICA_PROP) ||
+        !checkKeyExistence(message, ZkStateReader.PROPERTY_PROP) ||
+        !checkKeyExistence(message, ZkStateReader.PROPERTY_VALUE_PROP)) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
           "Overseer ADDREPLICAPROP requires " +
               ZkStateReader.COLLECTION_PROP + " and " + ZkStateReader.SHARD_ID_PROP + " and " +
@@ -200,7 +201,7 @@ public class ReplicaMutator {
   }
 
   public ZkWriteCommand setState(ClusterState clusterState, ZkNodeProps message) {
-    if (Overseer.isLegacy(dataProvider.getClusterStateProvider())) {
+    if (Overseer.isLegacy(cloudManager.getClusterStateProvider())) {
       return updateState(clusterState, message);
     } else {
       return updateStateNew(clusterState, message);
@@ -224,7 +225,7 @@ public class ReplicaMutator {
       ClusterStateMutator.getShardNames(numShards, shardNames);
       Map<String, Object> createMsg = Utils.makeMap(NAME, cName);
       createMsg.putAll(message.getProperties());
-      writeCommand = new ClusterStateMutator(dataProvider).createCollection(prevState, new ZkNodeProps(createMsg));
+      writeCommand = new ClusterStateMutator(cloudManager).createCollection(prevState, new ZkNodeProps(createMsg));
       DocCollection collection = writeCommand.collection;
       newState = ClusterStateMutator.newState(prevState, cName, collection);
     }
@@ -451,30 +452,34 @@ public class ReplicaMutator {
               }
             }
 
+            Map<String, Object> propMap = new HashMap<>();
+            propMap.put(Overseer.QUEUE_OPERATION, OverseerAction.UPDATESHARDSTATE.toLower());
+            propMap.put(ZkStateReader.COLLECTION_PROP, collection.getName());
             if (isLeaderSame) {
               log.info("Sub-shard leader node is still the same one at {} with ZK session id {}. Preparing to switch shard states.", shardParentNode, shardParentZkSession);
-              Map<String, Object> propMap = new HashMap<>();
-              propMap.put(Overseer.QUEUE_OPERATION, "updateshardstate");
               propMap.put(parentSliceName, Slice.State.INACTIVE.toString());
               propMap.put(sliceName, Slice.State.ACTIVE.toString());
+              long now = cloudManager.getTimeSource().getEpochTimeNs();
               for (Slice subShardSlice : subShardSlices) {
                 propMap.put(subShardSlice.getName(), Slice.State.ACTIVE.toString());
+                String lastTimeStr = subShardSlice.getStr(ZkStateReader.STATE_TIMESTAMP_PROP);
+                if (lastTimeStr != null) {
+                  long start = Long.parseLong(lastTimeStr);
+                  log.info("TIMINGS: Sub-shard " + subShardSlice.getName() + " recovered in " +
+                      TimeUnit.MILLISECONDS.convert(now - start, TimeUnit.NANOSECONDS) + " ms");
+                } else {
+                  log.info("TIMINGS Sub-shard " + subShardSlice.getName() + " not available: " + subShardSlice);
+                }
               }
-              propMap.put(ZkStateReader.COLLECTION_PROP, collection.getName());
-              ZkNodeProps m = new ZkNodeProps(propMap);
-              return new SliceMutator(dataProvider).updateShardState(prevState, m).collection;
             } else  {
               // we must mark the shard split as failed by switching sub-shards to recovery_failed state
-              Map<String, Object> propMap = new HashMap<>();
-              propMap.put(Overseer.QUEUE_OPERATION, "updateshardstate");
               propMap.put(sliceName, Slice.State.RECOVERY_FAILED.toString());
               for (Slice subShardSlice : subShardSlices) {
                 propMap.put(subShardSlice.getName(), Slice.State.RECOVERY_FAILED.toString());
               }
-              propMap.put(ZkStateReader.COLLECTION_PROP, collection.getName());
-              ZkNodeProps m = new ZkNodeProps(propMap);
-              return new SliceMutator(dataProvider).updateShardState(prevState, m).collection;
             }
+            ZkNodeProps m = new ZkNodeProps(propMap);
+            return new SliceMutator(cloudManager).updateShardState(prevState, m).collection;
           }
         }
       }

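The refactoring above hoists the shared UPDATESHARDSTATE plumbing out of the two branches, so the success and failure paths differ only in the states they put into the message. A sketch of the message shape for the success path, with hypothetical shard names:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.cloud.Overseer;
    import org.apache.solr.cloud.overseer.OverseerAction;
    import org.apache.solr.common.cloud.Slice;
    import org.apache.solr.common.cloud.ZkNodeProps;
    import org.apache.solr.common.cloud.ZkStateReader;

    // Hypothetical names; mirrors the propMap assembled above.
    Map<String, Object> propMap = new HashMap<>();
    propMap.put(Overseer.QUEUE_OPERATION, OverseerAction.UPDATESHARDSTATE.toLower());
    propMap.put(ZkStateReader.COLLECTION_PROP, "coll_A");
    propMap.put("shard1", Slice.State.INACTIVE.toString());   // parent slice retires
    propMap.put("shard1_0", Slice.State.ACTIVE.toString());   // sub-shards take over
    propMap.put("shard1_1", Slice.State.ACTIVE.toString());
    ZkNodeProps m = new ZkNodeProps(propMap);
    // handed to SliceMutator.updateShardState(...); the failure branch instead
    // marks the sub-shards RECOVERY_FAILED in the same message shape
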
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 8d7cdbf..3a46b2b 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -144,8 +144,10 @@ import static org.apache.solr.common.params.CollectionAdminParams.WITH_COLLECTIO
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonAdminParams.IN_PLACE_MOVE;
+import static org.apache.solr.common.params.CommonAdminParams.SPLIT_METHOD;
 import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
 import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.params.CommonParams.TIMING;
 import static org.apache.solr.common.params.CommonParams.VALUE_LONG;
 import static org.apache.solr.common.params.CoreAdminParams.DATA_DIR;
 import static org.apache.solr.common.params.CoreAdminParams.DELETE_DATA_DIR;
@@ -662,7 +664,9 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
           SHARD_ID_PROP,
           "split.key",
           CoreAdminParams.RANGES,
-          WAIT_FOR_FINAL_STATE);
+          WAIT_FOR_FINAL_STATE,
+          TIMING,
+          SPLIT_METHOD);
       return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
     }),
     DELETESHARD_OP(DELETESHARD, (req, rsp, h) -> {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java b/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java
index 9dda6d4..31382c3 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java
@@ -30,11 +30,13 @@ import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.DocRouter;
 import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.params.CommonAdminParams;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.update.SolrIndexSplitter;
 import org.apache.solr.update.SplitIndexCommand;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -78,9 +80,14 @@ class SplitOp implements CoreAdminHandler.CoreAdminOp {
     }
 
     log.info("Invoked split action for core: " + cname);
-    SolrCore core = it.handler.coreContainer.getCore(cname);
-    SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
+    String methodStr = params.get(CommonAdminParams.SPLIT_METHOD, SolrIndexSplitter.SplitMethod.REWRITE.toLower());
+    SolrIndexSplitter.SplitMethod splitMethod = SolrIndexSplitter.SplitMethod.get(methodStr);
+    if (splitMethod == null) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unsupported value of '" + CommonAdminParams.SPLIT_METHOD + "': " + methodStr);
+    }
+    SolrCore parentCore = it.handler.coreContainer.getCore(cname);
     List<SolrCore> newCores = null;
+    SolrQueryRequest req = null;
 
     try {
       // TODO: allow use of rangesStr in the future
@@ -91,9 +98,9 @@ class SplitOp implements CoreAdminHandler.CoreAdminOp {
       String routeFieldName = null;
       if (it.handler.coreContainer.isZooKeeperAware()) {
         ClusterState clusterState = it.handler.coreContainer.getZkController().getClusterState();
-        String collectionName = req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName();
+        String collectionName = parentCore.getCoreDescriptor().getCloudDescriptor().getCollectionName();
         DocCollection collection = clusterState.getCollection(collectionName);
-        String sliceName = req.getCore().getCoreDescriptor().getCloudDescriptor().getShardId();
+        String sliceName = parentCore.getCoreDescriptor().getCloudDescriptor().getShardId();
         Slice slice = collection.getSlice(sliceName);
         router = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT;
         if (ranges == null) {
@@ -101,7 +108,7 @@ class SplitOp implements CoreAdminHandler.CoreAdminOp {
           ranges = currentRange != null ? router.partitionRange(partitions, currentRange) : null;
         }
         Object routerObj = collection.get(DOC_ROUTER); // for back-compat with Solr 4.4
-        if (routerObj != null && routerObj instanceof Map) {
+        if (routerObj instanceof Map) {
           Map routerProps = (Map) routerObj;
           routeFieldName = (String) routerProps.get("field");
         }
@@ -131,9 +138,10 @@ class SplitOp implements CoreAdminHandler.CoreAdminOp {
         paths = Arrays.asList(pathsArr);
       }
 
+      req = new LocalSolrQueryRequest(parentCore, params);
 
-      SplitIndexCommand cmd = new SplitIndexCommand(req, paths, newCores, ranges, router, routeFieldName, splitKey);
-      core.getUpdateHandler().split(cmd);
+      SplitIndexCommand cmd = new SplitIndexCommand(req, it.rsp, paths, newCores, ranges, router, routeFieldName, splitKey, splitMethod);
+      parentCore.getUpdateHandler().split(cmd);
 
       if (it.handler.coreContainer.isZooKeeperAware()) {
         for (SolrCore newcore : newCores) {
@@ -150,7 +158,7 @@ class SplitOp implements CoreAdminHandler.CoreAdminOp {
       throw e;
     } finally {
       if (req != null) req.close();
-      if (core != null) core.close();
+      if (parentCore != null) parentCore.close();
       if (newCores != null) {
         for (SolrCore newCore : newCores) {
           newCore.close();

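The parameter validation above relies on the new SplitMethod enum (defined in the SolrIndexSplitter hunk further below), whose get() helper parses case-insensitively and returns null for unknown values. For example:

    SolrIndexSplitter.SplitMethod.get("link");    // -> LINK
    SolrIndexSplitter.SplitMethod.get("REWRITE"); // -> REWRITE (case-insensitive via Locale.ROOT)
    SolrIndexSplitter.SplitMethod.get("bogus");   // -> null, surfaced as a 400 BAD_REQUEST above
    SolrIndexSplitter.SplitMethod.LINK.toLower(); // -> "link"
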
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index bcc97eb..e64ee8a 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -47,6 +47,7 @@ import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.metrics.SolrMetricManager;
@@ -902,8 +903,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
     commit(new CommitUpdateCommand(cmd.req, false));
     SolrIndexSplitter splitter = new SolrIndexSplitter(cmd);
     splitCommands.mark();
+    NamedList<Object> results = new NamedList<>();
     try {
-      splitter.split();
+      splitter.split(results);
+      cmd.rsp.addResponse(results);
     } catch (IOException e) {
       numErrors.increment();
       numErrorsCumulative.mark();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
index aadbe74..75234fa 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
@@ -18,31 +18,59 @@ package org.apache.solr.update;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Future;
 
 import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.FilterCodecReader;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.HardlinkCopyDirectoryWrapper;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.Lock;
+import org.apache.lucene.util.BitSetIterator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.IOUtils;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.CompositeIdRouter;
 import org.apache.solr.common.cloud.DocRouter;
 import org.apache.solr.common.cloud.HashBasedRouter;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.IndexFetcher;
+import org.apache.solr.handler.SnapShooter;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.BitsFilteredPostingsEnum;
 import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.RTimerTree;
 import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,26 +78,47 @@ import org.slf4j.LoggerFactory;
 public class SolrIndexSplitter {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  private static final String INDEX_PREFIX = "index.";
+
+  public enum SplitMethod {
+    REWRITE,
+    LINK;
+
+    public static SplitMethod get(String p) {
+      if (p != null) {
+        try {
+          return SplitMethod.valueOf(p.toUpperCase(Locale.ROOT));
+        } catch (Exception ex) {
+          return null;
+        }
+      }
+      return null;
+    }
+
+    public String toLower() {
+      return toString().toLowerCase(Locale.ROOT);
+    }
+  }
+
   SolrIndexSearcher searcher;
   SchemaField field;
   List<DocRouter.Range> ranges;
   DocRouter.Range[] rangesArr; // same as ranges list, but an array for extra speed in inner loops
   List<String> paths;
   List<SolrCore> cores;
-  DocRouter router;
   HashBasedRouter hashRouter;
   int numPieces;
-  int currPartition = 0;
   String routeFieldName;
   String splitKey;
+  SplitMethod splitMethod;
+  RTimerTree timings = new RTimerTree();
 
   public SolrIndexSplitter(SplitIndexCommand cmd) {
     searcher = cmd.getReq().getSearcher();
     ranges = cmd.ranges;
     paths = cmd.paths;
     cores = cmd.cores;
-    router = cmd.router;
-    hashRouter = router instanceof HashBasedRouter ? (HashBasedRouter)router : null;
+    hashRouter = cmd.router instanceof HashBasedRouter ? (HashBasedRouter)cmd.router : null;
 
     if (ranges == null) {
       numPieces =  paths != null ? paths.size() : cores.size();
@@ -86,83 +135,413 @@ public class SolrIndexSplitter {
     if (cmd.splitKey != null) {
       splitKey = getRouteKey(cmd.splitKey);
     }
+    if (cores == null) {
+      this.splitMethod = SplitMethod.REWRITE;
+    } else {
+      this.splitMethod = cmd.splitMethod;
+    }
   }
 
-  public void split() throws IOException {
+  public void split(NamedList<Object> results) throws IOException {
+    SolrCore parentCore = searcher.getCore();
+    Directory parentDirectory = searcher.getRawReader().directory();
+    Lock parentDirectoryLock = null;
+    UpdateLog ulog = parentCore.getUpdateHandler().getUpdateLog();
+    if (ulog == null && splitMethod == SplitMethod.LINK) {
+      log.warn("No updateLog in parent core, switching to use potentially slower 'splitMethod=rewrite'");
+      splitMethod = SplitMethod.REWRITE;
+    }
+    if (splitMethod == SplitMethod.LINK) {
+      RTimerTree t = timings.sub("closeParentIW");
+      try {
+        // start buffering updates
+        ulog.bufferUpdates();
+        parentCore.getSolrCoreState().closeIndexWriter(parentCore, false);
+        // make sure we can lock the directory for our exclusive use
+        parentDirectoryLock = parentDirectory.obtainLock(IndexWriter.WRITE_LOCK_NAME);
+        log.info("Splitting in 'link' mode: closed parent IndexWriter...");
+        t.stop();
+      } catch (Exception e) {
+        if (parentDirectoryLock != null) {
+          IOUtils.closeWhileHandlingException(parentDirectoryLock);
+        }
+        try {
+          parentCore.getSolrCoreState().openIndexWriter(parentCore);
+          ulog.applyBufferedUpdates();
+        } catch (Exception e1) {
+          log.error("Error reopening IndexWriter after failed close", e1);
+          log.error("Original error closing IndexWriter:", e);
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reopening IndexWriter after failed close", e1);
+        }
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error closing current IndexWriter, aborting offline split...", e);
+      }
+    }
+    boolean success = false;
+    try {
+      RTimerTree t = timings.sub("doSplit");
+      doSplit();
+      t.stop();
+      success = true;
+    } catch (Exception e) {
+      results.add("failed", e.toString());
+      throw e;
+    } finally {
+      if (splitMethod == SplitMethod.LINK) {
+        IOUtils.closeWhileHandlingException(parentDirectoryLock);
+        RTimerTree t = timings.sub("reopenParentIW");
+        parentCore.getSolrCoreState().openIndexWriter(parentCore);
+        t.stop();
+        t = timings.sub("parentApplyBufferedUpdates");
+        ulog.applyBufferedUpdates();
+        t.stop();
+        log.info("Splitting in 'offline' mode " + (success? "finished" : "FAILED") +
+            ": re-opened parent IndexWriter.");
+      }
+    }
+    results.add(CommonParams.TIMING, timings.asNamedList());
+  }
+
+  public void doSplit() throws IOException {
 
     List<LeafReaderContext> leaves = searcher.getRawReader().leaves();
+    Directory parentDirectory = searcher.getRawReader().directory();
     List<FixedBitSet[]> segmentDocSets = new ArrayList<>(leaves.size());
-
-    log.info("SolrIndexSplitter: partitions=" + numPieces + " segments="+leaves.size());
-
-    for (LeafReaderContext readerContext : leaves) {
-      assert readerContext.ordInParent == segmentDocSets.size();  // make sure we're going in order
-      FixedBitSet[] docSets = split(readerContext);
-      segmentDocSets.add( docSets );
+    SolrIndexConfig parentConfig = searcher.getCore().getSolrConfig().indexConfig;
+    String timestamp = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date());
+
+    log.info("SolrIndexSplitter: partitions=" + numPieces + " segments=" + leaves.size());
+    RTimerTree t;
+
+    if (splitMethod != SplitMethod.LINK) {
+      t = timings.sub("findDocSetsPerLeaf");
+      for (LeafReaderContext readerContext : leaves) {
+        assert readerContext.ordInParent == segmentDocSets.size();  // make sure we're going in order
+        FixedBitSet[] docSets = split(readerContext, numPieces, field, rangesArr, splitKey, hashRouter, false);
+        segmentDocSets.add(docSets);
+      }
+      t.stop();
     }
 
 
+    Map<IndexReader.CacheKey, FixedBitSet[]> docsToDeleteCache = new ConcurrentHashMap<>();
+
     // would it be more efficient to write segment-at-a-time to each new index?
     // - need to worry about number of open descriptors
     // - need to worry about if IW.addIndexes does a sync or not...
     // - would be more efficient on the read side, but prob less efficient merging
-
     for (int partitionNumber=0; partitionNumber<numPieces; partitionNumber++) {
-      log.info("SolrIndexSplitter: partition #" + partitionNumber + " partitionCount=" + numPieces + (ranges != null ? " range=" + ranges.get(partitionNumber) : ""));
+      String partitionName = "SolrIndexSplitter:partition=" + partitionNumber + ",partitionCount=" + numPieces + (ranges != null ? ",range=" + ranges.get(partitionNumber) : "");
+      log.info(partitionName);
 
       boolean success = false;
 
       RefCounted<IndexWriter> iwRef = null;
-      IndexWriter iw = null;
-      if (cores != null) {
+      IndexWriter iw;
+      if (cores != null && splitMethod != SplitMethod.LINK) {
         SolrCore subCore = cores.get(partitionNumber);
         iwRef = subCore.getUpdateHandler().getSolrCoreState().getIndexWriter(subCore);
         iw = iwRef.get();
       } else {
-        SolrCore core = searcher.getCore();
-        String path = paths.get(partitionNumber);
-        iw = SolrIndexWriter.create(core, "SplittingIndexWriter"+partitionNumber + (ranges != null ? " " + ranges.get(partitionNumber) : ""), path,
-                                    core.getDirectoryFactory(), true, core.getLatestSchema(),
-                                    core.getSolrConfig().indexConfig, core.getDeletionPolicy(), core.getCodec());
+        if (splitMethod == SplitMethod.LINK) {
+          SolrCore subCore = cores.get(partitionNumber);
+          String path = subCore.getDataDir() + INDEX_PREFIX + timestamp;
+          t = timings.sub("hardLinkCopy");
+          t.resume();
+          // copy by hard-linking
+          Directory splitDir = subCore.getDirectoryFactory().get(path, DirectoryFactory.DirContext.DEFAULT, subCore.getSolrConfig().indexConfig.lockType);
+          // the wrapper doesn't hold any resources itself so it doesn't need closing
+          HardlinkCopyDirectoryWrapper hardLinkedDir = new HardlinkCopyDirectoryWrapper(splitDir);
+          boolean copiedOk = false;
+          try {
+            for (String file : parentDirectory.listAll()) {
+              // we've closed the IndexWriter, so ignore write.lock
+              // its file may be present even when IndexWriter is closed but
+              // we've already checked that the lock is not held by anyone else
+              if (file.equals(IndexWriter.WRITE_LOCK_NAME)) {
+                continue;
+              }
+              hardLinkedDir.copyFrom(parentDirectory, file, file, IOContext.DEFAULT);
+            }
+            copiedOk = true;
+          } finally {
+            if (!copiedOk) {
+              subCore.getDirectoryFactory().doneWithDirectory(splitDir);
+              subCore.getDirectoryFactory().remove(splitDir);
+            }
+          }
+          t.pause();
+          IndexWriterConfig iwConfig = parentConfig.toIndexWriterConfig(subCore);
+          // don't run merges at this time
+          iwConfig.setMergePolicy(NoMergePolicy.INSTANCE);
+          t = timings.sub("createSubIW");
+          t.resume();
+          iw = new SolrIndexWriter(partitionName, splitDir, iwConfig);
+          t.pause();
+        } else {
+          SolrCore core = searcher.getCore();
+          String path = paths.get(partitionNumber);
+          t = timings.sub("createSubIW");
+          t.resume();
+          iw = SolrIndexWriter.create(core, partitionName, path,
+              core.getDirectoryFactory(), true, core.getLatestSchema(),
+              core.getSolrConfig().indexConfig, core.getDeletionPolicy(), core.getCodec());
+          t.pause();
+        }
       }
 
       try {
-        // This removes deletions but optimize might still be needed because sub-shards will have the same number of segments as the parent shard.
-        for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
-          log.info("SolrIndexSplitter: partition #" + partitionNumber + " partitionCount=" + numPieces + (ranges != null ? " range=" + ranges.get(partitionNumber) : "") + " segment #"+segmentNumber + " segmentCount=" + leaves.size());
-          CodecReader subReader = SlowCodecReaderWrapper.wrap(leaves.get(segmentNumber).reader());
-          iw.addIndexes(new LiveDocsReader(subReader, segmentDocSets.get(segmentNumber)[partitionNumber]));
+        if (splitMethod == SplitMethod.LINK) {
+          t = timings.sub("deleteDocuments");
+          t.resume();
+          // apply deletions specific to this partition. As a side-effect, the first call also populates
+          // a cache of doc sets to delete per leaf reader per partition, which is reused for subsequent partitions.
+          iw.deleteDocuments(new SplittingQuery(partitionNumber, field, rangesArr, hashRouter, splitKey, docsToDeleteCache));
+          t.pause();
+        } else {
+          // This removes deletions but optimize might still be needed because sub-shards will have the same number of segments as the parent shard.
+          t = timings.sub("addIndexes");
+          t.resume();
+          for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
+            log.info("SolrIndexSplitter: partition #" + partitionNumber + " partitionCount=" + numPieces + (ranges != null ? " range=" + ranges.get(partitionNumber) : "") + " segment #"+segmentNumber + " segmentCount=" + leaves.size());
+            CodecReader subReader = SlowCodecReaderWrapper.wrap(leaves.get(segmentNumber).reader());
+            iw.addIndexes(new LiveDocsReader(subReader, segmentDocSets.get(segmentNumber)[partitionNumber]));
+          }
+          t.pause();
         }
         // we commit explicitly instead of sending a CommitUpdateCommand through the processor chain
         // because the sub-shard cores will just ignore such a commit because the update log is not
         // in active state at this time.
         //TODO no commitUpdateCommand
         SolrIndexWriter.setCommitData(iw, -1);
+        t = timings.sub("subIWCommit");
+        t.resume();
         iw.commit();
+        t.pause();
         success = true;
       } finally {
         if (iwRef != null) {
           iwRef.decref();
         } else {
           if (success) {
+            t = timings.sub("subIWClose");
+            t.resume();
             iw.close();
+            t.pause();
           } else {
             IOUtils.closeWhileHandlingException(iw);
           }
+          if (splitMethod == SplitMethod.LINK) {
+            SolrCore subCore = cores.get(partitionNumber);
+            subCore.getDirectoryFactory().release(iw.getDirectory());
+          }
+        }
+      }
+    }
+    // all sub-indexes were created successfully
+    // when using hard-linking, switch directories & refresh cores
+    if (splitMethod == SplitMethod.LINK && cores != null) {
+      boolean switchOk = true;
+      t = timings.sub("switchSubIndexes");
+      for (int partitionNumber = 0; partitionNumber < numPieces; partitionNumber++) {
+        SolrCore subCore = cores.get(partitionNumber);
+        String indexDirPath = subCore.getIndexDir();
+
+        log.debug("Switching directories");
+        String hardLinkPath = subCore.getDataDir() + INDEX_PREFIX + timestamp;
+        subCore.modifyIndexProps(INDEX_PREFIX + timestamp);
+        try {
+          subCore.getUpdateHandler().newIndexWriter(false);
+          openNewSearcher(subCore);
+        } catch (Exception e) {
+          log.error("Failed to switch sub-core " + indexDirPath + " to " + hardLinkPath + ", split will fail.", e);
+          switchOk = false;
+          break;
         }
       }
+      t.stop();
+      if (!switchOk) {
+        t = timings.sub("rollbackSubIndexes");
+        // rollback the switch
+        for (int partitionNumber = 0; partitionNumber < numPieces; partitionNumber++) {
+          SolrCore subCore = cores.get(partitionNumber);
+          Directory dir = null;
+          try {
+            dir = subCore.getDirectoryFactory().get(subCore.getDataDir(), DirectoryFactory.DirContext.META_DATA,
+                subCore.getSolrConfig().indexConfig.lockType);
+            dir.deleteFile(IndexFetcher.INDEX_PROPERTIES);
+          } finally {
+            if (dir != null) {
+              subCore.getDirectoryFactory().release(dir);
+            }
+          }
+          // switch back if necessary and remove the hardlinked dir
+          String hardLinkPath = subCore.getDataDir() + INDEX_PREFIX + timestamp;
+          try {
+            dir = subCore.getDirectoryFactory().get(hardLinkPath, DirectoryFactory.DirContext.DEFAULT,
+                subCore.getSolrConfig().indexConfig.lockType);
+            subCore.getDirectoryFactory().doneWithDirectory(dir);
+            subCore.getDirectoryFactory().remove(dir);
+          } finally {
+            if (dir != null) {
+              subCore.getDirectoryFactory().release(dir);
+            }
+          }
+          subCore.getUpdateHandler().newIndexWriter(false);
+          try {
+            openNewSearcher(subCore);
+          } catch (Exception e) {
+            log.warn("Error rolling back failed split of " + hardLinkPath, e);
+          }
+        }
+        t.stop();
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "There were errors during index split");
+      } else {
+        // complete the switch - remove original index
+        t = timings.sub("cleanSubIndex");
+        for (int partitionNumber = 0; partitionNumber < numPieces; partitionNumber++) {
+          SolrCore subCore = cores.get(partitionNumber);
+          String oldIndexPath = subCore.getDataDir() + "index";
+          Directory indexDir = null;
+          try {
+            indexDir = subCore.getDirectoryFactory().get(oldIndexPath,
+                DirectoryFactory.DirContext.DEFAULT, subCore.getSolrConfig().indexConfig.lockType);
+            subCore.getDirectoryFactory().doneWithDirectory(indexDir);
+            subCore.getDirectoryFactory().remove(indexDir);
+          } finally {
+            if (indexDir != null) {
+              subCore.getDirectoryFactory().release(indexDir);
+            }
+          }
+        }
+        t.stop();
+      }
+    }
+  }
+
+  private void openNewSearcher(SolrCore core) throws Exception {
+    Future[] waitSearcher = new Future[1];
+    core.getSearcher(true, false, waitSearcher, true);
+    if (waitSearcher[0] != null) {
+      waitSearcher[0].get();
+    }
+  }
+
+  private class SplittingQuery extends Query {
+    private final int partition;
+    private final SchemaField field;
+    private final DocRouter.Range[] rangesArr;
+    private final HashBasedRouter hashRouter;
+    private final String splitKey;
+    private final Map<IndexReader.CacheKey, FixedBitSet[]> docsToDelete;
+
+    SplittingQuery(int partition, SchemaField field, DocRouter.Range[] rangesArr, HashBasedRouter hashRouter, String splitKey,
+                   Map<IndexReader.CacheKey, FixedBitSet[]> docsToDelete) {
+      this.partition = partition;
+      this.field = field;
+      this.rangesArr = rangesArr;
+      this.hashRouter = hashRouter;
+      this.splitKey = splitKey;
+      this.docsToDelete = docsToDelete;
+    }
+
+    @Override
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
+
+        @Override
+        public Scorer scorer(LeafReaderContext context) throws IOException {
+          RTimerTree t = timings.sub("findDocsToDelete");
+          t.resume();
+          FixedBitSet set = findDocsToDelete(context);
+          t.pause();
+          log.info("### partition=" + partition + ", leaf=" + context + ", maxDoc=" + context.reader().maxDoc() +
+          ", numDels=" + context.reader().numDeletedDocs() + ", setLen=" + set.length() + ", setCard=" + set.cardinality());
+          Bits liveDocs = context.reader().getLiveDocs();
+          if (liveDocs != null) {
+            // check that we don't delete already deleted docs
+            FixedBitSet dels = FixedBitSet.copyOf(liveDocs);
+            dels.flip(0, dels.length());
+            dels.and(set);
+            if (dels.cardinality() > 0) {
+              log.error("### INVALID DELS " + dels.cardinality());
+            }
+          }
+          return new ConstantScoreScorer(this, score(), new BitSetIterator(set, set.length()));
+        }
 
+        @Override
+        public boolean isCacheable(LeafReaderContext ctx) {
+          return false;
+        }
+
+        @Override
+        public String toString() {
+          return "weight(shardSplittingQuery,part" + partition + ")";
+        }
+      };
     }
 
+    private FixedBitSet findDocsToDelete(LeafReaderContext readerContext) throws IOException {
+      // check whether a cached copy of bitsets already exists for this reader
+      FixedBitSet[] perPartition = docsToDelete.get(readerContext.reader().getCoreCacheHelper().getKey());
+      if (perPartition != null) {
+        return perPartition[partition];
+      }
+      synchronized (docsToDelete) {
+        perPartition = docsToDelete.get(readerContext.reader().getCoreCacheHelper().getKey());
+        if (perPartition != null) {
+          return perPartition[partition];
+        }
+
+        perPartition = split(readerContext, numPieces, field, rangesArr, splitKey, hashRouter, true);
+        docsToDelete.put(readerContext.reader().getCoreCacheHelper().getKey(), perPartition);
+        return perPartition[partition];
+      }
+    }
+
+    @Override
+    public String toString(String field) {
+      return "shardSplittingQuery";
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj == null) {
+        return false;
+      }
+      if (this == obj) {
+        return true;
+      }
+      if (!(obj instanceof SplittingQuery)) {
+        return false;
+      }
+      SplittingQuery q = (SplittingQuery)obj;
+      return partition == q.partition;
+    }
+
+    @Override
+    public int hashCode() {
+      return partition;
+    }
   }
 
-  FixedBitSet[] split(LeafReaderContext readerContext) throws IOException {
+  static FixedBitSet[] split(LeafReaderContext readerContext, int numPieces, SchemaField field, DocRouter.Range[] rangesArr,
+                             String splitKey, HashBasedRouter hashRouter, boolean delete) throws IOException {
     LeafReader reader = readerContext.reader();
     FixedBitSet[] docSets = new FixedBitSet[numPieces];
     for (int i=0; i<docSets.length; i++) {
       docSets[i] = new FixedBitSet(reader.maxDoc());
+      if (delete) {
+        docSets[i].set(0, reader.maxDoc());
+      }
     }
     Bits liveDocs = reader.getLiveDocs();
+    if (liveDocs != null && delete) {
+      FixedBitSet liveDocsSet = FixedBitSet.copyOf(liveDocs);
+      for (FixedBitSet set : docSets) {
+        set.and(liveDocsSet);
+      }
+    }
 
     Terms terms = reader.terms(field.getName());
     TermsEnum termsEnum = terms==null ? null : terms.iterator();
@@ -172,11 +551,12 @@ public class SolrIndexSplitter {
     PostingsEnum postingsEnum = null;
 
     int[] docsMatchingRanges = null;
-    if (ranges != null) {
+    if (rangesArr != null) {
       // +1 because documents can belong to *zero*, one, several or all ranges in rangesArr
       docsMatchingRanges = new int[rangesArr.length+1];
     }
 
+    int partition = 0;
     CharsRefBuilder idRef = new CharsRefBuilder();
     for (;;) {
       term = termsEnum.next();
@@ -209,14 +589,22 @@ public class SolrIndexSplitter {
       for (;;) {
         int doc = postingsEnum.nextDoc();
         if (doc == DocIdSetIterator.NO_MORE_DOCS) break;
-        if (ranges == null) {
-          docSets[currPartition].set(doc);
-          currPartition = (currPartition + 1) % numPieces;
+        if (rangesArr == null) {
+          if (delete) {
+            docSets[partition].clear(doc);
+          } else {
+            docSets[partition].set(doc);
+          }
+          partition = (partition + 1) % numPieces;
         } else  {
           int matchingRangesCount = 0;
           for (int i=0; i<rangesArr.length; i++) {      // inner-loop: use array here for extra speed.
             if (rangesArr[i].includes(hash)) {
-              docSets[i].set(doc);
+              if (delete) {
+                docSets[i].clear(doc);
+              } else {
+                docSets[i].set(doc);
+              }
               ++matchingRangesCount;
             }
           }
@@ -256,12 +644,10 @@ public class SolrIndexSplitter {
     if (idx <= 0) return null;
     String part1 = idString.substring(0, idx);
     int commaIdx = part1.indexOf(CompositeIdRouter.bitsSeparator);
-    if (commaIdx > 0) {
-      if (commaIdx + 1 < part1.length())  {
-        char ch = part1.charAt(commaIdx + 1);
-        if (ch >= '0' && ch <= '9') {
-          part1 = part1.substring(0, commaIdx);
-        }
+    if (commaIdx > 0 && commaIdx + 1 < part1.length())  {
+      char ch = part1.charAt(commaIdx + 1);
+      if (ch >= '0' && ch <= '9') {
+        part1 = part1.substring(0, commaIdx);
       }
     }
     return part1;
@@ -273,7 +659,7 @@ public class SolrIndexSplitter {
     final FixedBitSet liveDocs;
     final int numDocs;
 
-    public LiveDocsReader(CodecReader in, FixedBitSet liveDocs) throws IOException {
+    public LiveDocsReader(CodecReader in, FixedBitSet liveDocs) {
       super(in);
       this.liveDocs = liveDocs;
       this.numDocs = liveDocs.cardinality();
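
The LINK split method introduced above clones the parent index cheaply by
wrapping the destination with Lucene's HardlinkCopyDirectoryWrapper, so that
copyFrom() creates hard links instead of copying bytes. A minimal standalone
sketch of that technique, assuming the two index paths below (illustrative,
not from the patch):

import java.nio.file.Paths;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.HardlinkCopyDirectoryWrapper;
import org.apache.lucene.store.IOContext;

public class HardLinkCloneSketch {
  public static void main(String[] args) throws Exception {
    try (Directory parent = FSDirectory.open(Paths.get("/path/to/parent/index"));
         Directory child = FSDirectory.open(Paths.get("/path/to/child/index"))) {
      // the wrapper holds no resources itself, so it needs no separate closing;
      // it hard-links files when both directories are on the same filesystem
      Directory hardLinked = new HardlinkCopyDirectoryWrapper(child);
      for (String file : parent.listAll()) {
        // skip write.lock, exactly as the split code above does
        if (file.equals(IndexWriter.WRITE_LOCK_NAME)) {
          continue;
        }
        hardLinked.copyFrom(parent, file, file, IOContext.DEFAULT);
      }
    }
  }
}

After such a copy, the patch opens the sub-index with NoMergePolicy and removes
the documents that fall outside the partition's hash range via
deleteDocuments(SplittingQuery), which avoids rewriting each partition through
addIndexes() as the REWRITE path does.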

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java b/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java
index eaa1e59..7ea8a2a 100644
--- a/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java
+++ b/solr/core/src/java/org/apache/solr/update/SplitIndexCommand.java
@@ -19,6 +19,7 @@ package org.apache.solr.update;
 import org.apache.solr.common.cloud.DocRouter;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
 
 import java.util.List;
 
@@ -29,22 +30,26 @@ import java.util.List;
  *
  */
 public class SplitIndexCommand extends UpdateCommand {
-  // public List<Directory> dirs;
-  public List<String> paths;
-  public List<SolrCore> cores;  // either paths or cores should be specified
-  public List<DocRouter.Range> ranges;
-  public DocRouter router;
-  public String routeFieldName;
-  public String splitKey;
+  public final SolrQueryResponse rsp;
+  public final List<String> paths;
+  public final List<SolrCore> cores;  // either paths or cores should be specified
+  public final List<DocRouter.Range> ranges;
+  public final DocRouter router;
+  public final String routeFieldName;
+  public final String splitKey;
+  public final SolrIndexSplitter.SplitMethod splitMethod;
 
-  public SplitIndexCommand(SolrQueryRequest req, List<String> paths, List<SolrCore> cores, List<DocRouter.Range> ranges, DocRouter router, String routeFieldName, String splitKey) {
+  public SplitIndexCommand(SolrQueryRequest req, SolrQueryResponse rsp, List<String> paths, List<SolrCore> cores, List<DocRouter.Range> ranges,
+                           DocRouter router, String routeFieldName, String splitKey, SolrIndexSplitter.SplitMethod splitMethod) {
     super(req);
+    this.rsp = rsp;
     this.paths = paths;
     this.cores = cores;
     this.ranges = ranges;
     this.router = router;
     this.routeFieldName = routeFieldName;
     this.splitKey = splitKey;
+    this.splitMethod = splitMethod;
   }
 
   @Override
@@ -65,6 +70,7 @@ public class SplitIndexCommand extends UpdateCommand {
     if (splitKey != null) {
       sb.append(",split.key=" + splitKey);
     }
+    sb.append(",method=" + splitMethod.toLower());
     sb.append('}');
     return sb.toString();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
index 22862b4..dd5f9f3 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
@@ -134,7 +134,7 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest {
       killerThread.start();
       killCounter.incrementAndGet();
 
-      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, null, null);
+      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, null, null, false);
 
       log.info("Layout after split: \n");
       printLayout();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
index 92fd4d5..f6ee7b4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
@@ -62,7 +62,10 @@ import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
+import org.apache.solr.update.SolrIndexSplitter;
+import org.apache.solr.util.LogLevel;
 import org.apache.solr.util.TestInjection;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -72,6 +75,7 @@ import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
 
+@LogLevel("org.apache.solr.cloud.api.collections=DEBUG")
 @Slow
 public class ShardSplitTest extends BasicDistributedZkTest {
 
@@ -116,14 +120,24 @@ public class ShardSplitTest extends BasicDistributedZkTest {
   Creates a collection with replicationFactor=1, splits a shard. Restarts the sub-shard leader node.
   Add a replica. Ensure count matches in leader and replica.
    */
+  @Test
   public void testSplitStaticIndexReplication() throws Exception {
+    doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitStaticIndexReplicationLink() throws Exception {
+    doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     waitForThingsToLevelOut(15);
 
     DocCollection defCol = cloudClient.getZkStateReader().getClusterState().getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION);
     Replica replica = defCol.getReplicas().get(0);
     String nodeName = replica.getNodeName();
 
-    String collectionName = "testSplitStaticIndexReplication";
+    String collectionName = "testSplitStaticIndexReplication_" + splitMethod.toLower();
     CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1);
     create.setMaxShardsPerNode(5); // some high number so we can create replicas without hindrance
     create.setCreateNodeSet(nodeName); // we want to create the leader on a fixed node so that we know which one to restart later
@@ -141,6 +155,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
         CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName);
         splitShard.setShardName(SHARD1);
+        splitShard.setSplitMethod(splitMethod.toLower());
         String asyncId = splitShard.processAsync(client);
         RequestStatusState state = CollectionAdminRequest.requestStatus(asyncId).waitFor(client, 120);
         if (state == RequestStatusState.COMPLETED)  {
@@ -351,16 +366,31 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
   @Test
   public void testSplitMixedReplicaTypes() throws Exception {
+    doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitMixedReplicaTypesLink() throws Exception {
+    doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     waitForThingsToLevelOut(15);
-    String collectionName = "testSplitMixedReplicaTypes";
+    String collectionName = "testSplitMixedReplicaTypes_" + splitMethod.toLower();
     CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2, 2, 2);
     create.setMaxShardsPerNode(5); // some high number so we can create replicas without hindrance
     create.process(cloudClient);
     waitForRecoveriesToFinish(collectionName, false);
 
+    for (int i = 0; i < 100; i++) {
+      cloudClient.add(collectionName, getDoc("id", "id-" + i, "foo_s", "bar " + i));
+    }
+    cloudClient.commit(collectionName);
+
     CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName);
     splitShard.setShardName(SHARD1);
-    splitShard.process(cloudClient);
+    splitShard.setSplitMethod(splitMethod.toLower());
+    CollectionAdminResponse rsp = splitShard.process(cloudClient);
     waitForThingsToLevelOut(15);
 
     cloudClient.getZkStateReader().forceUpdateCollection(collectionName);
@@ -393,7 +423,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     assertEquals("actual PULL", numPull, actualPull.get());
   }
 
-    @Test
+  @Test
   @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
   public void testSplitWithChaosMonkey() throws Exception {
     waitForThingsToLevelOut(15);
@@ -600,6 +630,15 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
   @Test
   public void testSplitShardWithRule() throws Exception {
+    doSplitShardWithRule(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitShardWithRuleLink() throws Exception {
+    doSplitShardWithRule(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doSplitShardWithRule(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     waitForThingsToLevelOut(15);
 
     if (usually()) {
@@ -609,14 +648,14 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
 
     log.info("Starting testSplitShardWithRule");
-    String collectionName = "shardSplitWithRule";
+    String collectionName = "shardSplitWithRule_" + splitMethod.toLower();
     CollectionAdminRequest.Create createRequest = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2)
         .setRule("shard:*,replica:<2,node:*");
     CollectionAdminResponse response = createRequest.process(cloudClient);
     assertEquals(0, response.getStatus());
 
     CollectionAdminRequest.SplitShard splitShardRequest = CollectionAdminRequest.splitShard(collectionName)
-        .setShardName("shard1");
+        .setShardName("shard1").setSplitMethod(splitMethod.toLower());
     response = splitShardRequest.process(cloudClient);
     assertEquals(String.valueOf(response.getErrorMessages()), 0, response.getStatus());
   }
@@ -633,7 +672,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     // test with only one range
     subRanges.add(ranges.get(0));
     try {
-      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null);
+      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null, false);
       fail("Shard splitting with just one custom hash range should not succeed");
     } catch (HttpSolrClient.RemoteSolrException e) {
       log.info("Expected exception:", e);
@@ -644,7 +683,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     subRanges.add(ranges.get(3)); // order shouldn't matter
     subRanges.add(ranges.get(0));
     try {
-      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null);
+      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null, false);
       fail("Shard splitting with missing hashes in between given ranges should not succeed");
     } catch (HttpSolrClient.RemoteSolrException e) {
       log.info("Expected exception:", e);
@@ -657,7 +696,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     subRanges.add(ranges.get(2));
     subRanges.add(new DocRouter.Range(ranges.get(3).min - 15, ranges.get(3).max));
     try {
-      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null);
+      splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null, false);
       fail("Shard splitting with overlapping ranges should not succeed");
     } catch (HttpSolrClient.RemoteSolrException e) {
       log.info("Expected exception:", e);
@@ -683,6 +722,9 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     final int[] docCounts = new int[ranges.size()];
     int numReplicas = shard1.getReplicas().size();
 
+    cloudClient.getZkStateReader().forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION);
+    clusterState = cloudClient.getZkStateReader().getClusterState();
+    log.debug("-- COLLECTION: {}", clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION));
     del("*:*");
     for (int id = 0; id <= 100; id++) {
       String shardKey = "" + (char)('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution
@@ -725,7 +767,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     try {
       for (int i = 0; i < 3; i++) {
         try {
-          splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null);
+          splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1, subRanges, null, false);
           log.info("Layout after split: \n");
           printLayout();
           break;
@@ -807,7 +849,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
       for (int i = 0; i < 3; i++) {
         try {
-          splitShard(collectionName, SHARD1, null, null);
+          splitShard(collectionName, SHARD1, null, null, false);
           break;
         } catch (HttpSolrClient.RemoteSolrException e) {
           if (e.code() != 500) {
@@ -889,7 +931,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
       for (int i = 0; i < 3; i++) {
         try {
-          splitShard(collectionName, null, null, splitKey);
+          splitShard(collectionName, null, null, splitKey, false);
           break;
         } catch (HttpSolrClient.RemoteSolrException e) {
           if (e.code() != 500) {
@@ -992,9 +1034,11 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
   }
 
-  protected void splitShard(String collection, String shardId, List<DocRouter.Range> subRanges, String splitKey) throws SolrServerException, IOException {
+  protected void splitShard(String collection, String shardId, List<DocRouter.Range> subRanges, String splitKey, boolean offline) throws SolrServerException, IOException {
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionParams.CollectionAction.SPLITSHARD.toString());
+    params.set("timing", "true");
+    params.set("offline", String.valueOf(offline));
     params.set("collection", collection);
     if (shardId != null)  {
       params.set("shard", shardId);
@@ -1019,7 +1063,8 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
     try (HttpSolrClient baseServer = getHttpSolrClient(baseUrl, 30000, 60000 * 5)) {
-      baseServer.request(request);
+      NamedList<Object> rsp = baseServer.request(request);
+      log.info("Shard split response: " + Utils.toJSONString(rsp));
     }
   }
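
For comparison with the raw-params splitShard() helper above, the same request
can be issued through the typed SolrJ API that these tests use elsewhere. A
hedged sketch, assuming an existing SolrClient plus illustrative collection and
shard names (setSplitMethod() is the accessor added in this change set):

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.update.SolrIndexSplitter;

public class SplitShardSketch {
  // issue SPLITSHARD with the new "link" method; names are illustrative
  static CollectionAdminResponse splitWithLink(SolrClient client) throws Exception {
    CollectionAdminRequest.SplitShard split =
        CollectionAdminRequest.splitShard("myCollection");
    split.setShardName("shard1");
    split.setSplitMethod(SolrIndexSplitter.SplitMethod.LINK.toLower());
    return split.process(client);
  }
}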
 


[31/48] lucene-solr:jira/http2: SOLR-12344: SolrSlf4jReporter doesn't set MDC context.

Posted by da...@apache.org.
SOLR-12344: SolrSlf4jReporter doesn't set MDC context.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5de10c79
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5de10c79
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5de10c79

Branch: refs/heads/jira/http2
Commit: 5de10c79668bf786d9699db992bf85e2f4beb8b4
Parents: 868e970
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Aug 2 14:29:19 2018 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Aug 2 14:29:47 2018 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../apache/solr/metrics/SolrMetricManager.java  | 14 +++++
 .../metrics/reporters/SolrSlf4jReporter.java    | 63 +++++++++++++++++---
 .../src/test-files/solr/solr-slf4jreporter.xml  |  7 +++
 .../reporters/SolrSlf4jReporterTest.java        |  5 +-
 solr/solr-ref-guide/src/metrics-reporting.adoc  | 29 ++++++++-
 6 files changed, 109 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5de10c79/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 93171af..1b45436 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -196,6 +196,8 @@ Bug Fixes
   even though they remain the transaction log. The second case is when synchronously forwarding updates to sub-shard
   leader fails and the underlying errors are not propagated to the client. (Cao Manh Dat, shalin)
 
+* SOLR-12344: SolrSlf4jReporter doesn't set MDC context. (ab)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5de10c79/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index 5fa3659..f1b7923 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -55,8 +55,10 @@ import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.logging.MDCLoggingContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 /**
  * This class maintains a repository of named {@link MetricRegistry} instances, and provides several
@@ -921,6 +923,15 @@ public class SolrMetricManager {
         new Class[]{SolrMetricManager.class, String.class},
         new Object[]{this, registry}
     );
+    // prepare MDC for plugins that want to use its properties
+    MDCLoggingContext.setNode(coreContainer);
+    if (solrCore != null) {
+      MDCLoggingContext.setCore(solrCore);
+    }
+    if (tag != null) {
+      // add instance tag to MDC
+      MDC.put("tag", "t:" + tag);
+    }
     try {
       if (reporter instanceof SolrCoreReporter) {
         ((SolrCoreReporter)reporter).init(pluginInfo, solrCore);
@@ -931,6 +942,9 @@ public class SolrMetricManager {
       }
     } catch (IllegalStateException e) {
       throw new IllegalArgumentException("reporter init failed: " + pluginInfo, e);
+    } finally {
+      MDCLoggingContext.clear();
+      MDC.remove("tag");
     }
     registerReporter(registry, pluginInfo.name, tag, reporter);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5de10c79/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
index fdb0e2a..ff00a00 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
@@ -18,14 +18,23 @@ package org.apache.solr.metrics.reporters;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.util.Map;
+import java.util.SortedMap;
 import java.util.concurrent.TimeUnit;
 
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.ScheduledReporter;
 import com.codahale.metrics.Slf4jReporter;
+import com.codahale.metrics.Timer;
 import org.apache.solr.metrics.FilteringSolrMetricReporter;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 /**
  * Metrics reporter that wraps {@link com.codahale.metrics.Slf4jReporter}.
@@ -37,7 +46,7 @@ import org.slf4j.LoggerFactory;
  *   <li><code>filter</code>: (optional, str) if not empty only metric names that start
  *   with this value will be reported, default is all metrics from a registry,</li>
  *   <li><code>logger</code>: (optional, str) logger name to use. Default is the
- *   metrics group, eg. <code>solr.jvm</code></li>
+ *   metrics group, e.g. <code>solr.jvm</code>, <code>solr.core</code>, etc.</li>
  * </ul>
  */
 public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
@@ -47,9 +56,45 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
 
   private String instancePrefix = null;
   private String logger = null;
-  private Slf4jReporter reporter;
+  private Map<String, String> mdcContext;
+  private Slf4jReporterWrapper reporter;
   private boolean active;
 
+  // this wrapper allows us to set MDC context - unfortunately it's not possible to
+  // simply override {@link Slf4jReporter#report()} because its constructor is private
+  private class Slf4jReporterWrapper extends ScheduledReporter {
+    final Slf4jReporter delegate;
+    final Map<String, String> mdcContext;
+
+    Slf4jReporterWrapper(String logger, Map<String, String> mdcContext, Slf4jReporter delegate, TimeUnit rateUnit, TimeUnit durationUnit) {
+      super(null, logger, null, rateUnit, durationUnit);
+      this.delegate = delegate;
+      this.mdcContext = mdcContext;
+    }
+
+    @Override
+    public void report() {
+      // set up MDC
+      MDC.setContextMap(mdcContext);
+      try {
+        delegate.report();
+      } finally {
+        // clear MDC
+        MDC.clear();
+      }
+    }
+
+    @Override
+    public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
+      throw new UnsupportedOperationException("this method should never be called here!");
+    }
+
+    @Override
+    public void close() {
+      super.close();
+      delegate.close();
+    }
+  }
   /**
    * Create a SLF4J reporter for metrics managed in a named registry.
    *
@@ -71,11 +116,8 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
 
   @Override
   protected void doInit() {
-    if (instancePrefix == null) {
-      instancePrefix = registryName;
-    } else {
-      instancePrefix = instancePrefix + "." + registryName;
-    }
+    mdcContext = MDC.getCopyOfContextMap();
+    mdcContext.put("registry", "m:" + registryName);
     Slf4jReporter.Builder builder = Slf4jReporter
         .forRegistry(metricManager.registry(registryName))
         .convertRatesTo(TimeUnit.SECONDS)
@@ -83,6 +125,9 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
 
     final MetricFilter filter = newMetricFilter();
     builder = builder.filter(filter);
+    if (instancePrefix != null) {
+      builder = builder.prefixedWith(instancePrefix);
+    }
     if (logger == null || logger.isEmpty()) {
       // construct logger name from Group
       if (pluginInfo.attributes.containsKey("group")) {
@@ -98,7 +143,9 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
       }
     }
     builder = builder.outputTo(LoggerFactory.getLogger(logger));
-    reporter = builder.build();
+    // build BUT don't start - scheduled execution is handled by the wrapper
+    Slf4jReporter delegate = builder.build();
+    reporter = new Slf4jReporterWrapper(logger, mdcContext, delegate, TimeUnit.SECONDS, TimeUnit.MILLISECONDS);
     reporter.start(period, TimeUnit.SECONDS);
     active = true;
   }
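
The wrapper's whole job is the MDC bracket around delegate.report(). A
self-contained sketch of that mechanism, assuming an illustrative logger name
and registry value (a Log4j2 or Logback layout referencing %X{registry} would
then see the value, and likewise %X{node_name}, %X{core}, etc.):

import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcReportSketch {
  private static final Logger log = LoggerFactory.getLogger("foobar"); // illustrative

  public static void main(String[] args) {
    // capture whatever MDC context the calling thread already has
    Map<String, String> saved = MDC.getCopyOfContextMap(); // may be null
    MDC.put("registry", "m:solr.jvm"); // illustrative registry value
    try {
      log.info("type=GAUGE, name=memory.heap.used, value=...");
    } finally {
      MDC.clear(); // mirror the wrapper: don't leak context to a pooled thread
      if (saved != null) {
        MDC.setContextMap(saved);
      }
    }
  }
}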

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5de10c79/solr/core/src/test-files/solr/solr-slf4jreporter.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr-slf4jreporter.xml b/solr/core/src/test-files/solr/solr-slf4jreporter.xml
index f3144ca..6b91fe5 100644
--- a/solr/core/src/test-files/solr/solr-slf4jreporter.xml
+++ b/solr/core/src/test-files/solr/solr-slf4jreporter.xml
@@ -31,5 +31,12 @@
     <str name="filter">CONTAINER.cores</str>
     <str name="logger">foobar</str>
   </reporter>
+   <reporter name="test2" group="core" class="org.apache.solr.metrics.reporters.SolrSlf4jReporter">
+     <!-- for unit tests this is set to 1 second - DO NOT USE THIS VALUE IN PRODUCTION! -->
+     <int name="period">1</int>
+     <str name="prefix">test</str>
+     <str name="filter">INDEX.sizeInBytes</str>
+     <str name="logger">foobar</str>
+   </reporter>
  </metrics>
 </solr>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5de10c79/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
index 4cad788..7646864 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
@@ -82,7 +82,7 @@ public class SolrSlf4jReporterTest extends SolrTestCaseJ4 {
     if (!active) {
       fail("One or more reporters didn't become active in 20 seconds");
     }
-    Thread.sleep(5000);
+    Thread.sleep(10000);
 
     SolrDocumentList history = watcher.getHistory(-1, null);
     // dot-separated names are treated like class names and collapsed
@@ -93,6 +93,9 @@ public class SolrSlf4jReporterTest extends SolrTestCaseJ4 {
     if (history.stream().filter(d -> "foobar".equals(d.getFirstValue("logger"))).count() == 0) {
       fail("No 'foobar' logs in: " + history.toString());
     }
+    if (history.stream().filter(d -> "x:collection1".equals(d.getFirstValue("core"))).count() == 0) {
+      fail("No 'solr.core' or MDC context in logs: " + history.toString());
+    }
   }
 
   private static void ensureLoggingConfiguredAppropriately() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5de10c79/solr/solr-ref-guide/src/metrics-reporting.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/metrics-reporting.adoc b/solr/solr-ref-guide/src/metrics-reporting.adoc
index f66df3e..631b5a1 100644
--- a/solr/solr-ref-guide/src/metrics-reporting.adoc
+++ b/solr/solr-ref-guide/src/metrics-reporting.adoc
@@ -244,7 +244,7 @@ The SLF4J Reporter uses the `org.apache.solr.metrics.reporters.SolrSlf4jReporter
 It takes the following arguments, in addition to the common arguments <<Reporter Arguments,above>>.
 
 `logger`::
-The name of the logger to use. Default is empty, in which case the group or registry name will be used if specified in the plugin configuration.
+The name of the logger to use. Default is empty, in which case the group (or the initial part of the registry name that identifies a metrics group) will be used if specified in the plugin configuration.
 
 Users can specify logger name (and the corresponding logger configuration in e.g., Log4j configuration) to output metrics-related logging to separate file(s), which can then be processed by external applications.
 
@@ -263,7 +263,32 @@ type=METER, name={}, count={}, mean_rate={}, m1={}, m5={}, m15={}, rate_unit={}
 type=HISTOGRAM, name={}, count={}, min={}, max={}, mean={}, stddev={}, median={}, p75={}, p95={}, p98={}, p99={}, p999={}
 ----
 
-(curly braces added only as placeholders for actual values).
+(curly braces added here only as placeholders for actual values).
+
+Additionally, the following MDC context properties are passed to the logger and can be used in log formats:
+
+`node_name`::
+Solr node name (for SolrCloud deployments, otherwise null), prefixed with `n:`.
+
+`registry`::
+Metric registry name, prefixed with `m:`.
+
+For reporters that are specific to a SolrCore, the following properties are also available:
+
+`collection`::
+Collection name, prefixed with `c:`.
+
+`shard`::
+Shard name, prefixed with `s:`.
+
+`replica`::
+Replica name (core node name), prefixed with `r:`.
+
+`core`::
+SolrCore name, prefixed with `x:`.
+
+`tag`::
+Reporter instance tag, prefixed with `t:`.
 
 === Graphite Reporter
 


[14/48] lucene-solr:jira/http2: SOLR-12412: Leak transaction log on tragic event

Posted by da...@apache.org.
SOLR-12412: Leak transaction log on tragic event


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/eada799f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/eada799f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/eada799f

Branch: refs/heads/jira/http2
Commit: eada799f576a2a1cb6dd16179a34ef283cdb4101
Parents: c9e3c45
Author: Cao Manh Dat <da...@apache.org>
Authored: Wed Aug 1 07:13:41 2018 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Wed Aug 1 07:13:41 2018 +0700

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/update/UpdateLog.java |  6 ++++++
 .../org/apache/solr/cloud/LeaderTragicEventTest.java     | 11 +++++------
 2 files changed, 11 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eada799f/solr/core/src/java/org/apache/solr/update/UpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index 46d8435..1abf23c 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -188,6 +188,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
   protected TransactionLog bufferTlog;
   protected TransactionLog tlog;
   protected TransactionLog prevTlog;
+  protected TransactionLog prevTlogOnPrecommit;
   protected final Deque<TransactionLog> logs = new LinkedList<>();  // list of recent logs, newest first
   protected LinkedList<TransactionLog> newestLogsOnStartup = new LinkedList<>();
   protected int numOldRecords;  // number of records in the recent logs
@@ -810,6 +811,11 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
       // since document additions can happen concurrently with commit, create
       // a new transaction log first so that we know the old one is definitely
       // in the index.
+      if (prevTlog != null) {
+        // postCommit for prevTlog was not called, perhaps because the index is corrupted;
+        // if we overwrite the prevTlog value, the corresponding tlog will be leaked, so close it first
+        postCommit(cmd);
+      }
       prevTlog = tlog;
       tlog = null;
       id++;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eada799f/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
index e432493..83bd3c3 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
@@ -38,6 +38,7 @@ import org.apache.solr.common.cloud.ClusterStateUtil;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.MockDirectoryFactory;
@@ -181,15 +182,13 @@ public class LeaderTragicEventTest extends SolrCloudTestCase {
 
       Replica oldLeader = corruptLeader(collection, new ArrayList<>());
 
-      //TODO better way to test this
-      Thread.sleep(5000);
-      Replica leader = getCollectionState(collection).getSlice("shard1").getLeader();
-      assertEquals(leader.getName(), oldLeader.getName());
-
       if (otherReplicaJetty != null) {
-        // won't be able to do anything here, since this replica can't recovery from the leader
         otherReplicaJetty.start();
       }
+      //TODO better way to test this
+      Thread.sleep(2000);
+      Replica leader = getCollectionState(collection).getSlice("shard1").getLeader();
+      assertEquals(leader.getName(), oldLeader.getName());
     } finally {
       CollectionAdminRequest.deleteCollection(collection).process(cluster.getSolrClient());
     }


[30/48] lucene-solr:jira/http2: SOLR-12592: support #EQUAL function in cores in autoscaling policies

Posted by da...@apache.org.
SOLR-12592: support #EQUAL function in cores in autoscaling policies


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/868e9708
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/868e9708
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/868e9708

Branch: refs/heads/jira/http2
Commit: 868e970816d8bb52f138a1181416438c348c750e
Parents: 259bc2b
Author: Noble Paul <no...@apache.org>
Authored: Thu Aug 2 15:20:46 2018 +1000
Committer: Noble Paul <no...@apache.org>
Committed: Thu Aug 2 15:20:46 2018 +1000

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../autoscaling/AutoScalingHandlerTest.java     |  2 +-
 .../client/solrj/cloud/autoscaling/Clause.java  | 15 +++-
 .../solrj/cloud/autoscaling/CoresVariable.java  | 48 ++++++++++---
 .../cloud/autoscaling/ReplicaVariable.java      |  4 ++
 .../solrj/cloud/autoscaling/Variable.java       | 43 ++++++------
 .../solrj/cloud/autoscaling/TestPolicy.java     | 55 ++++++++++-----
 .../solrj/cloud/autoscaling/TestPolicy2.java    | 74 +++++++++++++++++++-
 8 files changed, 192 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6864ce7..93171af 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -131,6 +131,8 @@ New Features
 
 * SOLR-12402: Factor out SolrDefaultStreamFactory class. (Christine Poerschke)
 
+* SOLR-12592: support #EQUAL function in cores in autoscaling policies (noble)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java
index afce5a4..5b8ec21 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java
@@ -769,7 +769,7 @@ public class AutoScalingHandlerTest extends SolrCloudTestCase {
       assertEquals(6, node.size());
       assertNotNull(node.get("node"));
       assertNotNull(node.get("cores"));
-      assertEquals(0L, node.get("cores"));
+      assertEquals(0d, node.get("cores"));
       assertNotNull(node.get("freedisk"));
       assertTrue(node.get("freedisk") instanceof Double);
       assertNotNull(node.get("sysLoadAvg"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
index 5fd9f8b..5fe6894 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
@@ -382,10 +382,12 @@ public class Clause implements MapWriter, Comparable<Clause> {
       }
     } else {
       for (Row r : session.matrix) {
+        computedValueEvaluator.node = r.node;
         SealedClause sealedClause = getSealedClause(computedValueEvaluator);
         if (!sealedClause.getGlobalTag().isPass(r)) {
           sealedClause.getGlobalTag().varType.addViolatingReplicas(ctx.reset(null, null,
-              new Violation(sealedClause, null, null, r.node, r.getVal(sealedClause.globalTag.name), sealedClause.globalTag.delta(r.getVal(globalTag.name)), null)));
+              new Violation(sealedClause, null, null, r.node, r.getVal(sealedClause.globalTag.name),
+                  sealedClause.globalTag.delta(r.getVal(globalTag.name)), null)));
         }
       }
     }
@@ -428,6 +430,17 @@ public class Clause implements MapWriter, Comparable<Clause> {
         }
       }
     }
+
+    if (this.getTag().op != LESS_THAN && this.getTag().varType == Type.NODE) {
+      collVsShardVsTagVsCount.forEach((coll, shardVsNodeVsCount) ->
+          shardVsNodeVsCount.forEach((shard, nodeVsCount) -> {
+            for (Row row : allRows) {
+              if (!nodeVsCount.containsKey(row.node)) {
+                nodeVsCount.put(row.node, new ReplicaCount());
+              }
+            }
+          }));
+    }
     return collVsShardVsTagVsCount;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
index 577717f..45f9eb7 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
@@ -19,8 +19,6 @@ package org.apache.solr.client.solrj.cloud.autoscaling;
 
 import java.util.function.Consumer;
 
-import org.apache.solr.common.cloud.rule.ImplicitSnitch;
-
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
 
 public class CoresVariable extends VariableBase {
@@ -35,14 +33,15 @@ public class CoresVariable extends VariableBase {
 
   @Override
   public void addViolatingReplicas(Violation.Ctx ctx) {
-    for (Row r : ctx.allRows) {
-      if (!ctx.clause.tag.isPass(r)) {
-        r.forEachReplica(replicaInfo -> ctx.currentViolation
+    for (Row row : ctx.allRows) {
+      if (row.node.equals(ctx.currentViolation.node)) {
+        row.forEachReplica(replicaInfo -> ctx.currentViolation
             .addReplica(new Violation.ReplicaInfoAndErr(replicaInfo)
-                .withDelta(ctx.clause.tag.delta(r.getVal(ImplicitSnitch.CORES)))));
+                .withDelta(ctx.currentViolation.replicaCountDelta)));
       }
     }
 
+
   }
 
   @Override
@@ -52,18 +51,49 @@ public class CoresVariable extends VariableBase {
       for (int i = 0; i < Math.abs(ctx.violation.replicaCountDelta); i++) {
         Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
             .hint(Suggester.Hint.SRC_NODE, ctx.violation.node);
-        ctx.addSuggestion(suggester);
+        if (ctx.addSuggestion(suggester) == null) break;
       }
     }
   }
 
   @Override
   public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> ops, boolean strictMode) {
-    cell.val = cell.val == null ? 0 : ((Number) cell.val).longValue() + 1;
+    cell.val = cell.val == null ? 0 : ((Number) cell.val).doubleValue() + 1;
   }
 
   @Override
   public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
-    cell.val = cell.val == null ? 0 : ((Number) cell.val).longValue() - 1;
+    cell.val = cell.val == null ? 0 : ((Number) cell.val).doubleValue() - 1;
+  }
+
+  @Override
+  public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
+    if (condition.computedType == Clause.ComputedType.EQUAL) {
+      int[] coresCount = new int[1];
+      int[] liveNodes = new int[1];
+      for (Row row : session.matrix) {
+        if (!row.isLive) continue;
+        liveNodes[0]++;
+        row.forEachReplica(replicaInfo -> coresCount[0]++);
+      }
+      return liveNodes[0] == 0 || coresCount[0] == 0 ? 0d : (double) coresCount[0] / (double) liveNodes[0];
+    } else {
+      throw new IllegalArgumentException("Invalid computed type in " + condition);
+    }
+  }
+
+  @Override
+  public String postValidate(Clause.Condition condition) {
+    Clause.Condition nodeTag = condition.getClause().getTag();
+    if (nodeTag.name.equals("node") && nodeTag.op == Operand.WILDCARD) {
+      return null;
+    } else {
+      throw new IllegalArgumentException("cores: '#EQUAL' can be used only with node: '#ANY'");
+    }
+  }
+
+  @Override
+  public Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
+    return ReplicaVariable.checkForRangeOperand(expected, strVal, computedType);
   }
 }
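
Per the postValidate() check above, #EQUAL on cores is only legal together
with a node: '#ANY' tag, so a policy clause using this feature would look like
(a hedged example, not taken from the patch):

  {"cores": "#EQUAL", "node": "#ANY"}

computeValue() resolves #EQUAL to the total core count divided by the number
of live nodes, so the clause asks the framework to keep every live node at
(roughly) the average number of cores.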

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
index 2f66609..ab0a03a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
@@ -52,6 +52,10 @@ class ReplicaVariable extends VariableBase {
   @Override
   public Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
     if (computedType == Clause.ComputedType.ALL) return expected;
+    return checkForRangeOperand(expected, strVal, computedType);
+  }
+
+  static Operand checkForRangeOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
     if (strVal instanceof String) {
       String s = ((String) strVal).trim();
       int hyphenIdx = s.indexOf('-');

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
index 8df74bf..c3d8ca2 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
@@ -86,7 +86,7 @@ public interface Variable {
 
     @Meta(name = "collection",
         type = String.class)
-    COLL(),
+    COLL,
     @Meta(
         name = "shard",
         type = String.class,
@@ -98,7 +98,7 @@ public interface Variable {
         min = 0, max = -1,
         implementation = ReplicaVariable.class,
         computedValues = {Clause.ComputedType.EQUAL, Clause.ComputedType.PERCENT, Clause.ComputedType.ALL})
-    REPLICA(),
+    REPLICA,
     @Meta(name = ImplicitSnitch.PORT,
         type = Long.class,
         min = 1,
@@ -106,35 +106,35 @@ public interface Variable {
         supportArrayVals = true,
         wildCards = Policy.EACH
     )
-    PORT(),
+    PORT,
     @Meta(name = "ip_1",
         type = Long.class,
         min = 0,
         max = 255,
         supportArrayVals = true,
         wildCards = Policy.EACH)
-    IP_1(),
+    IP_1,
     @Meta(name = "ip_2",
         type = Long.class,
         min = 0,
         max = 255,
         supportArrayVals = true,
         wildCards = Policy.EACH)
-    IP_2(),
+    IP_2,
     @Meta(name = "ip_3",
         type = Long.class,
         min = 0,
         max = 255,
         supportArrayVals = true,
         wildCards = Policy.EACH)
-    IP_3(),
+    IP_3,
     @Meta(name = "ip_4",
         type = Long.class,
         min = 0,
         max = 255,
         supportArrayVals = true,
         wildCards = Policy.EACH)
-    IP_4(),
+    IP_4,
     @Meta(name = ImplicitSnitch.DISK,
         type = Double.class,
         min = 0,
@@ -143,12 +143,12 @@ public interface Variable {
         associatedPerNodeValue = "totaldisk",
         implementation = FreeDiskVariable.class,
         computedValues = Clause.ComputedType.PERCENT)
-    FREEDISK(),
+    FREEDISK,
 
     @Meta(name = "totaldisk",
         type = Double.class,
         isHidden = true, implementation = VariableBase.TotalDiskVariable.class)
-    TOTALDISK(),
+    TOTALDISK,
 
     @Meta(name = Variable.coreidxsize,
         type = Double.class,
@@ -157,40 +157,41 @@ public interface Variable {
         min = 0,
         implementation = VariableBase.CoreIndexSizeVariable.class,
         metricsKey = "INDEX.sizeInBytes")
-    CORE_IDX(),
+    CORE_IDX,
     @Meta(name = ImplicitSnitch.NODEROLE,
         type = String.class,
         enumVals = "overseer")
-    NODE_ROLE(),
+    NODE_ROLE,
 
     @Meta(name = ImplicitSnitch.CORES,
-        type = Long.class,
-        min = 0,
+        type = Double.class,
+        min = 0, max = -1,
+        computedValues = Clause.ComputedType.EQUAL,
         implementation = CoresVariable.class)
-    CORES(),
+    CORES,
 
     @Meta(name = ImplicitSnitch.SYSLOADAVG,
         type = Double.class,
         min = 0,
         max = 100,
         isNodeSpecificVal = true)
-    SYSLOADAVG(),
+    SYSLOADAVG,
 
     @Meta(name = ImplicitSnitch.HEAPUSAGE,
         type = Double.class,
         min = 0,
         isNodeSpecificVal = true)
-    HEAPUSAGE(),
+    HEAPUSAGE,
     @Meta(name = "NUMBER",
         type = Long.class,
         min = 0)
-    NUMBER(),
+    NUMBER,
 
     @Meta(name = "STRING",
         type = String.class,
         wildCards = Policy.EACH,
         supportArrayVals = true)
-    STRING(),
+    STRING,
 
     @Meta(name = "node",
         type = String.class,
@@ -198,19 +199,19 @@ public interface Variable {
         wildCards = {Policy.ANY, Policy.EACH},
         implementation = NodeVariable.class,
         supportArrayVals = true)
-    NODE(),
+    NODE,
 
     @Meta(name = "LAZY",
         type = void.class,
         implementation = VariableBase.LazyVariable.class)
-    LAZY(),
+    LAZY,
 
     @Meta(name = ImplicitSnitch.DISKTYPE,
         type = String.class,
         enumVals = {"ssd", "rotational"},
         implementation = VariableBase.DiskTypeVariable.class,
         supportArrayVals = true)
-    DISKTYPE();
+    DISKTYPE;
 
     public final String tagName;
     public final Class type;
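
Most of this diff is cosmetic: the empty argument lists on the enum constants
(COLL(), REPLICA(), and so on) were redundant and are dropped. The substantive
change is to CORES, whose type widens from Long to Double and which now
advertises the '#EQUAL' computed value. That is what makes a policy like the
one exercised in the tests below legal (snippet lifted from TestPolicy2, with
the same loose JSON quoting the tests use):

    {cluster-policy: [{cores: '#EQUAL', node: '#ANY'}],
     cluster-preferences: [{minimize: cores}, {minimize: freedisk, precision: 50}]}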

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
index 16dfdcd..48d8f2e 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
@@ -869,6 +869,11 @@ public class TestPolicy extends SolrTestCaseJ4 {
     expectThrows(IllegalArgumentException.class,
         () -> Clause.create("{replica: '#EQUAL' , shard: '#EACH' , sysprop.zone:[east]}"));
 
+    clause = Clause.create("{cores: '#EQUAL' , node:'#ANY'}");
+    assertEquals(Clause.ComputedType.EQUAL, clause.globalTag.computedType);
+    expectThrows(IllegalArgumentException.class,
+        () -> Clause.create("{cores: '#EQUAL' , node:'node1'}"));
+
   }
 
 
@@ -997,20 +1002,30 @@ public class TestPolicy extends SolrTestCaseJ4 {
       }
     });
     List<Violation> violations = session.getViolations();
-    assertEquals(1, violations.size());
-    Violation violation = violations.get(0);
-    assertEquals("node1", violation.node);
-    RangeVal val = (RangeVal) violation.getClause().replica.val;
-    assertEquals(1.0d, val.min.doubleValue(), 0.01);
-    assertEquals(2.0, val.max.doubleValue(), 0.01);
-    assertEquals(1.2d, val.actual.doubleValue(), 0.01d);
-    assertEquals(1, violation.replicaCountDelta.doubleValue(), 0.01);
-    assertEquals(3, violation.getViolatingReplicas().size());
-    Set<String> expected = ImmutableSet.of("r1", "r3", "r5");
-    for (Violation.ReplicaInfoAndErr replicaInfoAndErr : violation.getViolatingReplicas()) {
-      assertTrue(expected.contains(replicaInfoAndErr.replicaInfo.getCore()));
+    assertEquals(2, violations.size());
+    for (Violation violation : violations) {
+      if (violation.node.equals("node1")) {
+        RangeVal val = (RangeVal) violation.getClause().replica.val;
+        assertEquals(1.0d, val.min.doubleValue(), 0.01);
+        assertEquals(2.0, val.max.doubleValue(), 0.01);
+        assertEquals(1.2d, val.actual.doubleValue(), 0.01d);
+        assertEquals(1, violation.replicaCountDelta.doubleValue(), 0.01);
+        assertEquals(3, violation.getViolatingReplicas().size());
+        Set<String> expected = ImmutableSet.of("r1", "r3", "r5");
+        for (Violation.ReplicaInfoAndErr replicaInfoAndErr : violation.getViolatingReplicas()) {
+          assertTrue(expected.contains(replicaInfoAndErr.replicaInfo.getCore()));
+        }
+      } else if (violation.node.equals("node5")) {
+        assertEquals(-1, violation.replicaCountDelta.doubleValue(), 0.01);
+      } else {
+        fail();
+      }
     }
-    System.out.println();
   }
 
@@ -2341,9 +2356,16 @@ public class TestPolicy extends SolrTestCaseJ4 {
     AutoScalingConfig autoScalingConfig = new AutoScalingConfig((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
     Policy.Session session = autoScalingConfig.getPolicy().createSession(cloudManagerWithData(dataproviderdata));
     List<Violation> violations = session.getViolations();
-    assertEquals(1, violations.size());
-    assertEquals(1.0d, violations.get(0).replicaCountDelta, 0.01);
-    assertEquals(1.53d, ((RangeVal) violations.get(0).getClause().getReplica().val).actual);
+    assertEquals(2, violations.size());
+    for (Violation violation : violations) {
+      if (violation.node.equals("10.0.0.6:8983_solr")) {
+        assertEquals(1.0d, violation.replicaCountDelta, 0.01);
+        assertEquals(1.53d, ((RangeVal) violation.getClause().getReplica().val).actual);
+      } else if (violation.node.equals("10.0.0.6:7574_solr")) {
+        assertEquals(-1.0d, violation.replicaCountDelta, 0.01);
+      }
+    }
 
 
     dataproviderdata = "{" +
@@ -2493,6 +2515,7 @@ public class TestPolicy extends SolrTestCaseJ4 {
       assertEquals(500d, r.delta, 0.1);
 
     }
+
     List<Suggester.SuggestionInfo> l = PolicyHelper.getSuggestions(cfg, cloudManagerWithData(dataproviderdata));
     assertEquals(3, l.size());
     Map m = l.get(0).toMap(new LinkedHashMap<>());
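
The doubled violation counts asserted above follow from '#EQUAL' reporting
both sides of an imbalance: the node over the computed range gets a positive
replicaCountDelta, the node under it a negative one. Reading the numbers off
the first test's assertions (6 cores across 5 live nodes, matching the
TestPolicy2 data below; the raw counts are not shown in this hunk, so treat
this as a reconstruction):

    expected = coresCount / liveNodes = 6 / 5 = 1.2
    allowed  = [floor(1.2), ceil(1.2)] = [1, 2]
    node1 holds 3 replicas -> delta = 3 - 2 = +1  (one too many)
    node5 holds 0 replicas -> delta = 0 - 1 = -1  (one too few)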

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/868e9708/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
index 678600f..c1d69b9 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java
@@ -27,6 +27,7 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.solr.SolrTestCaseJ4;
@@ -106,7 +107,8 @@ public class TestPolicy2 extends SolrTestCaseJ4 {
             "    'node1':{'cores' : 3, 'freedisk' : 700, 'totaldisk' :1000, 'sysprop.zone' : 'east'}," +
             "    'node2':{'cores' : 1, 'freedisk' : 900, 'totaldisk' :1000, 'sysprop.zone' : 'west'}," +
             "    'node3':{'cores' : 1, 'freedisk' : 900, 'totaldisk' :1000, 'sysprop.zone': 'east'}," +
-            "    'node4':{'cores' : 1, 'freedisk' : 900, 'totaldisk' :1000, 'sysprop.zone': 'west'}" +
+            "    'node4':{'cores' : 1, 'freedisk' : 900, 'totaldisk' :1000, 'sysprop.zone': 'west'}," +
+            "    'node5':{'cores' : 0, 'freedisk' : 1000, 'totaldisk' :1000, 'sysprop.zone': 'west'}" +
             "  }," +
             "  'replicaValues':[" +
             "    {'INDEX.sizeInGB': 100, core : r1}," +
@@ -118,7 +120,7 @@ public class TestPolicy2 extends SolrTestCaseJ4 {
 
     String autoScalingjson = "{cluster-policy:[" +
         "    { replica : '<3' , shard : '#EACH', sysprop.zone: [east,west] } ]," +
-        "  'cluster-preferences':[{ minimize : cores},{minimize : freedisk, precision : 50}]}";
+        "  'cluster-preferences':[{ minimize : cores},{maximize : freedisk, precision : 50}]}";
     Policy policy = new Policy((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
     Policy.Session session = policy.createSession(createCloudManager(state, metaData));
     List<Violation> violations = session.getViolations();
@@ -131,7 +133,7 @@ public class TestPolicy2 extends SolrTestCaseJ4 {
 
     autoScalingjson = "{cluster-policy:[" +
         "    { replica : '<3' , shard : '#EACH', sysprop.zone: '#EACH' } ]," +
-        "  'cluster-preferences':[{ minimize : cores},{minimize : freedisk, precision : 50}]}";
+        "  'cluster-preferences':[{ minimize : cores},{maximize : freedisk, precision : 50}]}";
     policy = new Policy((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
     session = policy.createSession(createCloudManager(state, metaData));
     violations = session.getViolations();
@@ -141,6 +143,72 @@ public class TestPolicy2 extends SolrTestCaseJ4 {
     for (Violation.ReplicaInfoAndErr r : violations.get(0).getViolatingReplicas()) {
       assertEquals("shard2", r.replicaInfo.getShard());
     }
+    autoScalingjson = "{cluster-policy:[" +
+        "    { replica : '#EQUAL' , node: '#ANY' } ]," +
+        "  'cluster-preferences':[{ minimize : cores},{maximize : freedisk, precision : 50}]}";
+    policy = new Policy((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
+    session = policy.createSession(createCloudManager(state, metaData));
+    violations = session.getViolations();
+    List<Suggester.SuggestionInfo> suggestions = null;
+    assertEquals(2, violations.size());
+    for (Violation violation : violations) {
+      if (violation.node.equals("node1")) {
+        assertEquals(1.0d, violation.replicaCountDelta, 0.001);
+        assertEquals(3, violation.getViolatingReplicas().size());
+      } else if (violation.node.equals("node5")) {
+        assertEquals(-1.0d, violation.replicaCountDelta, 0.001);
+        assertEquals(0, violation.getViolatingReplicas().size());
+      } else {
+        fail();
+      }
+    }
+    suggestions = PolicyHelper.getSuggestions(new AutoScalingConfig((Map<String, Object>) Utils.fromJSONString(autoScalingjson))
+        , createCloudManager(state, metaData));
+    assertEquals(1, suggestions.size());
+    String repName = (String) Utils.getObjectByPath(suggestions.get(0).operation, true, "command/move-replica/replica");
+
+    AtomicBoolean found = new AtomicBoolean(false);
+    session.getNode("node1").forEachReplica(replicaInfo -> {
+      if (replicaInfo.getName().equals(repName)) {
+        found.set(true);
+      }
+    });
+    assertTrue(found.get());
+
+    autoScalingjson = "{cluster-policy:[" +
+        "    { cores : '#EQUAL' , node: '#ANY' } ]," +
+        "  'cluster-preferences':[{ minimize : cores},{minimize : freedisk, precision : 50}]}";
+    policy = new Policy((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
+    session = policy.createSession(createCloudManager(state, metaData));
+    violations = session.getViolations();
+    assertEquals(2, violations.size());
+    for (Violation violation : violations) {
+      if(violation.node.equals("node1")) {
+        assertEquals(1.0d, violation.replicaCountDelta, 0.001);
+        assertEquals(3, violation.getViolatingReplicas().size());
+      } else if(violation.node.equals("node5")){
+        assertEquals(-1.0d, violation.replicaCountDelta, 0.001);
+        assertEquals(0, violation.getViolatingReplicas().size());
+      } else {
+        fail();
+      }
+
+    }
+
+    suggestions = PolicyHelper.getSuggestions(new AutoScalingConfig((Map<String, Object>) Utils.fromJSONString(autoScalingjson)),
+        createCloudManager(state, metaData));
+    assertEquals(1, suggestions.size());
+    assertEquals("node5", Utils.getObjectByPath(suggestions.get(0).operation, true, "command/move-replica/targetNode"));
+
+    String rName = (String) Utils.getObjectByPath(suggestions.get(0).operation, true, "command/move-replica/replica");
+
+    found.set(false);
+    session.getNode("node1").forEachReplica(replicaInfo -> {
+      if (replicaInfo.getName().equals(rName)) {
+        found.set(true);
+      }
+    });
+    assertTrue(found.get());
 
   }
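
Both new test blocks end the same way: the single suggestion is a move-replica
off the overloaded node1 (explicitly onto the empty node5 in the cores case).
A compressed sketch of how the tests read a suggestion back out; the paths are
copied from the assertions above, everything else is illustrative:

    Suggester.SuggestionInfo s = suggestions.get(0);
    String replica = (String) Utils.getObjectByPath(s.operation, true,
        "command/move-replica/replica");
    String targetNode = (String) Utils.getObjectByPath(s.operation, true,
        "command/move-replica/targetNode");
    // e.g. targetNode == "node5": the empty node receives the moved replica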
 


[05/48] lucene-solr:jira/http2: SOLR-12558: solr/core (private) logger renames

Posted by da...@apache.org.
SOLR-12558: solr/core (private) logger renames


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9262ed7e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9262ed7e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9262ed7e

Branch: refs/heads/jira/http2
Commit: 9262ed7e56085f125f666d24a375d12e9cdef089
Parents: abd6b07
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 31 16:58:51 2018 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Jul 31 16:58:51 2018 +0100

----------------------------------------------------------------------
 .../org/apache/solr/core/ConfigSetService.java  |  8 +++----
 .../apache/solr/core/CorePropertiesLocator.java | 22 ++++++++++----------
 .../org/apache/solr/handler/GraphHandler.java   |  4 ++--
 .../org/apache/solr/handler/SQLHandler.java     |  4 ++--
 .../org/apache/solr/handler/StreamHandler.java  |  4 ++--
 .../apache/solr/handler/sql/SolrEnumerator.java |  4 ++--
 .../java/org/apache/solr/search/Grouping.java   |  8 +++----
 .../solr/search/grouping/CommandHandler.java    |  4 ++--
 .../solr/store/hdfs/HdfsLocalityReporter.java   |  8 +++----
 9 files changed, 33 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
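
Every file below makes the same mechanical change; the declaration pattern is
untouched. For reference, the idiom as it appears in the diffs:

    import java.lang.invoke.MethodHandles;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class Example {
      // MethodHandles.lookup().lookupClass() resolves to the enclosing class,
      // so the declaration can be copied between files without edits.
      private static final Logger log =
          LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
    }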


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
index 13ac9ce..69e160b 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
@@ -44,7 +44,7 @@ import org.slf4j.LoggerFactory;
  */
 public abstract class ConfigSetService {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   public static ConfigSetService createConfigSetService(NodeConfig nodeConfig, SolrResourceLoader loader, ZkController zkController) {
     if (zkController != null)
       return new CloudConfigSetService(loader, zkController);
@@ -228,15 +228,15 @@ public abstract class ConfigSetService {
         try {
           String cachedName = cacheName(schemaFile);
           return schemaCache.get(cachedName, () -> {
-            logger.info("Creating new index schema for core {}", cd.getName());
+            log.info("Creating new index schema for core {}", cd.getName());
             return IndexSchemaFactory.buildIndexSchema(cd.getSchemaName(), solrConfig);
           });
         } catch (ExecutionException e) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Error creating index schema for core " + cd.getName(), e);
         } catch (IOException e) {
-          logger.warn("Couldn't get last modified time for schema file {}: {}", schemaFile, e.getMessage());
-          logger.warn("Will not use schema cache");
+          log.warn("Couldn't get last modified time for schema file {}: {}", schemaFile, e.getMessage());
+          log.warn("Will not use schema cache");
         }
       }
       return IndexSchemaFactory.buildIndexSchema(cd.getSchemaName(), solrConfig);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
index 3c8a40d..76eb5c4 100644
--- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
@@ -50,13 +50,13 @@ public class CorePropertiesLocator implements CoresLocator {
 
   public static final String PROPERTIES_FILENAME = "core.properties";
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Path rootDirectory;
 
   public CorePropertiesLocator(Path coreDiscoveryRoot) {
     this.rootDirectory = coreDiscoveryRoot;
-    logger.debug("Config-defined core root directory: {}", this.rootDirectory);
+    log.debug("Config-defined core root directory: {}", this.rootDirectory);
   }
 
   @Override
@@ -92,7 +92,7 @@ public class CorePropertiesLocator implements CoresLocator {
       }
     }
     catch (IOException e) {
-      logger.error("Couldn't persist core properties to {}: {}", propfile, e.getMessage());
+      log.error("Couldn't persist core properties to {}: {}", propfile, e.getMessage());
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
           "Couldn't persist core properties to " + propfile.toAbsolutePath().toString() + " : " + e.getMessage());
     }
@@ -109,7 +109,7 @@ public class CorePropertiesLocator implements CoresLocator {
       try {
         Files.deleteIfExists(propfile);
       } catch (IOException e) {
-        logger.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
+        log.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
       }
     }
   }
@@ -132,7 +132,7 @@ public class CorePropertiesLocator implements CoresLocator {
 
   @Override
   public List<CoreDescriptor> discover(final CoreContainer cc) {
-    logger.debug("Looking for core definitions underneath {}", rootDirectory);
+    log.debug("Looking for core definitions underneath {}", rootDirectory);
     final List<CoreDescriptor> cds = Lists.newArrayList();
     try {
       Set<FileVisitOption> options = new HashSet<>();
@@ -144,7 +144,7 @@ public class CorePropertiesLocator implements CoresLocator {
           if (file.getFileName().toString().equals(PROPERTIES_FILENAME)) {
             CoreDescriptor cd = buildCoreDescriptor(file, cc);
             if (cd != null) {
-              logger.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
+              log.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
               cds.add(cd);
             }
             return FileVisitResult.SKIP_SIBLINGS;
@@ -157,19 +157,19 @@ public class CorePropertiesLocator implements CoresLocator {
           // if we get an error on the root, then fail the whole thing
           // otherwise, log a warning and continue to try and load other cores
           if (file.equals(rootDirectory)) {
-            logger.error("Error reading core root directory {}: {}", file, exc);
+            log.error("Error reading core root directory {}: {}", file, exc);
             throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reading core root directory");
           }
-          logger.warn("Error visiting {}: {}", file, exc);
+          log.warn("Error visiting {}: {}", file, exc);
           return FileVisitResult.CONTINUE;
         }
       });
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Couldn't walk file tree under " + this.rootDirectory, e);
     }
-    logger.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
+    log.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
     if (cds.size() > 0) {
-      logger.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
+      log.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
     }
     return cds;
   }
@@ -190,7 +190,7 @@ public class CorePropertiesLocator implements CoresLocator {
       return ret;
     }
     catch (IOException e) {
-      logger.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());
+      log.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
index 13874c9..ed5ae0a 100644
--- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
 public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
   private StreamFactory streamFactory = new DefaultStreamFactory();
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private String coreName;
 
   @Override
@@ -110,7 +110,7 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
       tupleStream = this.streamFactory.constructStream(params.get("expr"));
     } catch (Exception e) {
       //Catch exceptions that occur while the stream is being created. This will include streaming expression parse rules.
-      SolrException.log(logger, e);
+      SolrException.log(log, e);
       Map requestContext = req.getContext();
       requestContext.put("stream", new DummyErrorStream(e));
       return;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index 67ea1f6..6b0330a 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
 
 public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static String defaultZkhost = null;
   private static String defaultWorkerCollection = null;
@@ -124,7 +124,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
       if(tupleStream != null) {
         tupleStream.close();
       }
-      SolrException.log(logger, e);
+      SolrException.log(log, e);
       rsp.add("result-set", new StreamHandler.DummyErrorStream(e));
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 269d12d..4e43e1c 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -66,7 +66,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
   static SolrClientCache clientCache = new SolrClientCache();
   static ModelCache modelCache = null;
   private StreamFactory streamFactory = new DefaultStreamFactory();
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private String coreName;
   private Map<String,DaemonStream> daemons = Collections.synchronizedMap(new HashMap());
 
@@ -157,7 +157,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
     } catch (Exception e) {
       // Catch exceptions that occur while the stream is being created. This will include streaming expression parse
       // rules.
-      SolrException.log(logger, e);
+      SolrException.log(log, e);
       rsp.add("result-set", new DummyErrorStream(e));
 
       return;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
index 7ba3838..8c06f32 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
@@ -30,7 +30,7 @@ import java.util.Map;
 
 /** Enumerator that reads from a Solr collection. */
 class SolrEnumerator implements Enumerator<Object> {
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final TupleStream tupleStream;
   private final List<Map.Entry<String, Class>> fields;
@@ -126,7 +126,7 @@ class SolrEnumerator implements Enumerator<Object> {
         return true;
       }
     } catch (IOException e) {
-      logger.error("IOException", e);
+      log.error("IOException", e);
       return false;
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/search/Grouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index 8342bb7..938562d 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -77,7 +77,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Grouping {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final SolrIndexSearcher searcher;
   private final QueryResult qr;
@@ -384,8 +384,8 @@ public class Grouping {
             cachedCollector.replay(secondPhaseCollectors);
           } else {
             signalCacheWarning = true;
-            logger.warn(String.format(Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
-            logger.warn("Please increase cache size or disable group caching.");
+            log.warn(String.format(Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
+            log.warn("Please increase cache size or disable group caching.");
             searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
           }
         } else {
@@ -447,7 +447,7 @@ public class Grouping {
       }
       searcher.search(q, collector);
     } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
-      logger.warn( "Query: " + query + "; " + x.getMessage() );
+      log.warn( "Query: " + query + "; " + x.getMessage() );
       qr.setPartialResults(true);
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
index ec421e4..336c27b 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
@@ -115,7 +115,7 @@ public class CommandHandler {
 
   }
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final QueryCommand queryCommand;
   private final List<Command> commands;
@@ -243,7 +243,7 @@ public class CommandHandler {
       searcher.search(query, collector);
     } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
       partialResults = true;
-      logger.warn( "Query: " + query + "; " + x.getMessage() );
+      log.warn( "Query: " + query + "; " + x.getMessage() );
     }
 
     if (includeHitCount) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9262ed7e/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
index d10216b..2bf60cb 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
@@ -45,7 +45,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
   public static final String LOCALITY_BLOCKS_LOCAL = "locality.blocks.local";
   public static final String LOCALITY_BLOCKS_RATIO = "locality.blocks.ratio";
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String hostname;
   private final ConcurrentMap<HdfsDirectory,ConcurrentMap<FileStatus,BlockLocation[]>> cache;
@@ -129,7 +129,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
               }
             }
           } catch (IOException e) {
-            logger.warn("Could not retrieve locality information for {} due to exception: {}",
+            log.warn("Could not retrieve locality information for {} due to exception: {}",
                 hdfsDirectory.getHdfsDirPath(), e);
           }
         }
@@ -160,7 +160,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
    *          The directory to keep metrics on.
    */
   public void registerDirectory(HdfsDirectory dir) {
-    logger.info("Registering direcotry {} for locality metrics.", dir.getHdfsDirPath().toString());
+    log.info("Registering direcotry {} for locality metrics.", dir.getHdfsDirPath().toString());
     cache.put(dir, new ConcurrentHashMap<FileStatus, BlockLocation[]>());
   }
 
@@ -181,7 +181,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
     FileStatus[] statuses = fs.listStatus(dir.getHdfsDirPath());
     List<FileStatus> statusList = Arrays.asList(statuses);
 
-    logger.debug("Updating locality information for: {}", statusList);
+    log.debug("Updating locality information for: {}", statusList);
 
     // Keep only the files that still exist
     cachedStatuses.retainAll(statusList);
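
One thing the rename leaves alone: several call sites (Grouping and
CommandHandler above) still build warning messages by string concatenation.
As an aside, not part of this commit, the two styles compare as follows:

    // Parameterized form: the message is only assembled if WARN is enabled.
    log.warn("Query: {}; {}", query, x.getMessage());
    // Concatenation builds the string eagerly, before the level check.
    log.warn("Query: " + query + "; " + x.getMessage());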


[40/48] lucene-solr:jira/http2: SOLR-12617: remove beanutils license and notice files

Posted by da...@apache.org.
SOLR-12617: remove beanutils license and notice files


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0b59b0ed
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0b59b0ed
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0b59b0ed

Branch: refs/heads/jira/http2
Commit: 0b59b0ed1da4919a7ccd87dd2cbac1148ea64ff9
Parents: e3cdb39
Author: Varun Thacker <va...@apache.org>
Authored: Fri Aug 3 13:38:10 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Fri Aug 3 13:38:10 2018 -0700

----------------------------------------------------------------------
 solr/licenses/commons-beanutils-LICENSE-ASL.txt | 202 -------------------
 solr/licenses/commons-beanutils-NOTICE.txt      |   2 -
 2 files changed, 204 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0b59b0ed/solr/licenses/commons-beanutils-LICENSE-ASL.txt
----------------------------------------------------------------------
diff --git a/solr/licenses/commons-beanutils-LICENSE-ASL.txt b/solr/licenses/commons-beanutils-LICENSE-ASL.txt
deleted file mode 100644
index d645695..0000000
--- a/solr/licenses/commons-beanutils-LICENSE-ASL.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0b59b0ed/solr/licenses/commons-beanutils-NOTICE.txt
----------------------------------------------------------------------
diff --git a/solr/licenses/commons-beanutils-NOTICE.txt b/solr/licenses/commons-beanutils-NOTICE.txt
deleted file mode 100644
index 3f59805..0000000
--- a/solr/licenses/commons-beanutils-NOTICE.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-This product includes software developed by
-The Apache Software Foundation (http://www.apache.org/).


[07/48] lucene-solr:jira/http2: SOLR-12448: Fix outdated docs that say autoAddReplicas is for shared FS only in Ref Guide and v2 API specs

Posted by da...@apache.org.
SOLR-12448: Fix outdated docs that say autoAddReplicas is for shared FS only in Ref Guide and v2 API specs


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ecad9198
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ecad9198
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ecad9198

Branch: refs/heads/jira/http2
Commit: ecad9198d83b9454aeb140d59a769196b9a913e0
Parents: 6fbaf69
Author: Cassandra Targett <ct...@apache.org>
Authored: Tue Jul 31 12:02:58 2018 -0500
Committer: Cassandra Targett <ct...@apache.org>
Committed: Tue Jul 31 12:25:45 2018 -0500

----------------------------------------------------------------------
 solr/solr-ref-guide/src/collections-api.adoc                 | 8 ++++++--
 solr/solr-ref-guide/src/running-solr-on-hdfs.adoc            | 2 +-
 solr/solrj/src/resources/apispec/collections.Commands.json   | 3 ++-
 .../apispec/collections.collection.Commands.modify.json      | 4 +++-
 4 files changed, 12 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecad9198/solr/solr-ref-guide/src/collections-api.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/collections-api.adoc b/solr/solr-ref-guide/src/collections-api.adoc
index 527f4c7..b94509d 100644
--- a/solr/solr-ref-guide/src/collections-api.adoc
+++ b/solr/solr-ref-guide/src/collections-api.adoc
@@ -52,7 +52,9 @@ The number of shards to be created as part of the collection. This is a required
 A comma separated list of shard names, e.g., `shard-x,shard-y,shard-z`. This is a required parameter when the `router.name` is `implicit`.
 
 `replicationFactor`::
-The number of replicas to be created for each shard. The default is `1`. This will create a NRT type of replica. If you want another type of replica, see the `tlogReplicas` and `pullReplica` parameters. See the section <<shards-and-indexing-data-in-solrcloud.adoc#types-of-replicas,Types of Replicas>> for more information about replica types.
+The number of replicas to be created for each shard. The default is `1`.
++
+This will create an NRT type of replica. If you want another type of replica, see the `tlogReplicas` and `pullReplicas` parameters below. See the section <<shards-and-indexing-data-in-solrcloud.adoc#types-of-replicas,Types of Replicas>> for more information about replica types.
 
 `nrtReplicas`::
 The number of NRT (Near-Real-Time) replicas to create for this collection. This type of replica maintains a transaction log and updates its index locally. If you want all of your replicas to be of this type, you can simply use `replicationFactor` instead.
@@ -96,7 +98,9 @@ Please note that <<realtime-get.adoc#realtime-get,RealTime Get>> or retrieval by
 Set core property _name_ to _value_. See the section <<defining-core-properties.adoc#defining-core-properties,Defining core.properties>> for details on supported properties and values.
 
 `autoAddReplicas`::
-When set to `true`, enables automatic addition of replicas on shared file systems (such as HDFS) only. See the section <<running-solr-on-hdfs.adoc#automatically-add-replicas-in-solrcloud,autoAddReplicas Settings>> for more details on settings and overrides. The default is `false`.
+When set to `true`, enables automatic addition of replicas when the number of active replicas falls below the value set for `replicationFactor`. This may occur if a replica goes down, for example. The default is `false`, which means new replicas will not be added.
++
+While this parameter is provided as part of Solr's set of features to provide autoscaling of clusters, it is available even when you have not implemented any other part of autoscaling (such as a policy). See the section <<solrcloud-autoscaling-auto-add-replicas.adoc#the-autoaddreplicas-parameter,SolrCloud Autoscaling Automatically Adding Replicas>> for more details about this option and how it can be used.
 
 `async`::
 Request ID to track this action which will be <<Asynchronous Calls,processed asynchronously>>.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecad9198/solr/solr-ref-guide/src/running-solr-on-hdfs.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/running-solr-on-hdfs.adoc b/solr/solr-ref-guide/src/running-solr-on-hdfs.adoc
index c468058..92c32b7 100644
--- a/solr/solr-ref-guide/src/running-solr-on-hdfs.adoc
+++ b/solr/solr-ref-guide/src/running-solr-on-hdfs.adoc
@@ -181,7 +181,7 @@ If using Kerberos, you will need to add the three Kerberos related properties to
 
 == Automatically Add Replicas in SolrCloud
 
-The ability to automatically add new replicas when the Overseer notices that a shard has gone down was previously only available to users running Solr in HDFS, but it is now available to all users via Solr's autoscaling framework. See the section <<solrcloud-autoscaling-triggers.adoc#auto-add-replicas-trigger,Auto Add Replicas Trigger>> for details on how to enable and disable this feature.
+The ability to automatically add new replicas when the Overseer notices that a shard has gone down was previously only available to users running Solr in HDFS, but it is now available to all users via Solr's autoscaling framework. See the section <<solrcloud-autoscaling-auto-add-replicas.adoc#the-autoaddreplicas-parameter,SolrCloud Autoscaling Automatically Adding Replicas>> for details on how to enable and disable this feature.
 
 [WARNING]
 ====

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecad9198/solr/solrj/src/resources/apispec/collections.Commands.json
----------------------------------------------------------------------
diff --git a/solr/solrj/src/resources/apispec/collections.Commands.json b/solr/solrj/src/resources/apispec/collections.Commands.json
index 0268227..ed55a1e 100644
--- a/solr/solrj/src/resources/apispec/collections.Commands.json
+++ b/solr/solrj/src/resources/apispec/collections.Commands.json
@@ -82,7 +82,8 @@
         },
         "autoAddReplicas": {
           "type": "boolean",
-          "description": "When set to true, enables auto addition of replicas on shared file systems (such as HDFS). See https://lucene.apache.org/solr/guide/running-solr-on-hdfs.html for more details on settings and overrides.",
+          "description": "When set to true, enables auto addition of replicas when the number of active replicas falls below the value set for replicationFactor.",
+          "documentation": "https://lucene.apache.org/solr/guide/solrcloud-autoscaling-auto-add-replicas.html",
           "default": "false"
         },
         "rule": {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecad9198/solr/solrj/src/resources/apispec/collections.collection.Commands.modify.json
----------------------------------------------------------------------
diff --git a/solr/solrj/src/resources/apispec/collections.collection.Commands.modify.json b/solr/solrj/src/resources/apispec/collections.collection.Commands.modify.json
index 658b4eb..62e35cc 100644
--- a/solr/solrj/src/resources/apispec/collections.collection.Commands.modify.json
+++ b/solr/solrj/src/resources/apispec/collections.collection.Commands.modify.json
@@ -26,7 +26,9 @@
     },
     "autoAddReplicas": {
       "type": "boolean",
-      "description": "When set to true, enables auto addition of replicas on shared file systems (such as HDFS). See https://lucene.apache.org/solr/guide/running-solr-on-hdfs.html for more details on settings and overrides."
+      "description": "When set to true, enables auto addition of replicas when the number of active replicas falls below the value set for replicationFactor.",
+      "documentation": "https://lucene.apache.org/solr/guide/solrcloud-autoscaling-auto-add-replicas.html",
+      "default": "false"
     },
     "replicationFactor": {
       "type": "integer",


[18/48] lucene-solr:jira/http2: Make the nightly test smaller so that it does not fail with "GC overhead limit exceeded" (OOM). Clean up random-number fetching to make it shorter.

Posted by da...@apache.org.
Make the nightly test smaller so that it does not fail with "GC overhead limit exceeded" (OOM). Clean up random-number fetching to make it shorter.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3203e99d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3203e99d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3203e99d

Branch: refs/heads/jira/http2
Commit: 3203e99d8fbcaac3458fcf882d4ec229f97dfa43
Parents: 5dffff7
Author: Dawid Weiss <dw...@apache.org>
Authored: Wed Aug 1 13:49:39 2018 +0200
Committer: Dawid Weiss <dw...@apache.org>
Committed: Wed Aug 1 14:05:02 2018 +0200

----------------------------------------------------------------------
 .../lucene/document/TestLatLonShapeQueries.java | 15 +++--
 .../java/org/apache/lucene/geo/GeoTestUtil.java | 70 ++++++++++----------
 2 files changed, 43 insertions(+), 42 deletions(-)
----------------------------------------------------------------------
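
A minimal sketch of the cleanup pattern this commit applies, assuming JUnit 4 and the randomizedtesting library on the classpath; the test class name is a hypothetical stand-in:

import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;

import com.carrotsearch.randomizedtesting.RandomizedRunner;
import org.junit.Test;
import org.junit.runner.RunWith;

@RunWith(RandomizedRunner.class)
public class RandomCleanupSketch {
  @Test
  public void example() {
    // Instead of random().nextBoolean() and random().nextInt(bound), the
    // statically imported helpers draw from the same per-test randomness
    // context while keeping call sites shorter.
    boolean merge = randomBoolean();
    int pick = randomInt(20); // RandomizedTest.randomInt(max) includes max
  }
}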


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3203e99d/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
index 03941b9..21d4e83 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
@@ -45,6 +45,8 @@ import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
 import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
 import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
 import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
@@ -104,7 +106,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
   @Nightly
   public void testRandomBig() throws Exception {
-    doTestRandom(200000);
+    doTestRandom(50000);
   }
 
   private void doTestRandom(int count) throws Exception {
@@ -116,7 +118,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
     Polygon[] polygons = new Polygon[numPolygons];
     for (int id = 0; id < numPolygons; ++id) {
-      int x = random().nextInt(20);
+      int x = randomInt(20);
       if (x == 17) {
         polygons[id] = null;
         if (VERBOSE) {
@@ -127,6 +129,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
         polygons[id] = GeoTestUtil.nextPolygon();
       }
     }
+
     verify(polygons);
   }
 
@@ -173,8 +176,8 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
         poly2D[id] = Polygon2D.create(quantizePolygon(polygons[id]));
       }
       w.addDocument(doc);
-      if (id > 0 && random().nextInt(100) == 42) {
-        int idToDelete = random().nextInt(id);
+      if (id > 0 && randomInt(100) == 42) {
+        int idToDelete = randomInt(id);
         w.deleteDocuments(new Term("id", ""+idToDelete));
         deleted.add(idToDelete);
         if (VERBOSE) {
@@ -183,7 +186,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
       }
     }
 
-    if (random().nextBoolean()) {
+    if (randomBoolean()) {
       w.forceMerge(1);
     }
     final IndexReader r = DirectoryReader.open(w);
@@ -198,7 +201,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
     for (int iter = 0; iter < iters; ++iter) {
       if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
+        System.out.println("\nTEST: iter=" + (iter + 1) + " of " + iters + " s=" + s);
       }
 
       // BBox

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3203e99d/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
index fe81fd6..8817d20 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
@@ -19,13 +19,16 @@ package org.apache.lucene.geo;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Random;
 
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.SloppyMath;
 import org.apache.lucene.util.TestUtil;
 
-import com.carrotsearch.randomizedtesting.RandomizedContext;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
+import static org.apache.lucene.util.LuceneTestCase.random;
 
 /** static methods for testing geo */
 public class GeoTestUtil {
@@ -63,7 +66,7 @@ public class GeoTestUtil {
 
     // first pick a base value.
     final double baseValue;
-    int surpriseMe = random().nextInt(17);
+    int surpriseMe = randomInt(17);
     if (surpriseMe == 0) {
       // random bits
       long lowBits = NumericUtils.doubleToSortableLong(low);
@@ -81,18 +84,18 @@ public class GeoTestUtil {
     } else if (surpriseMe == 4) {
       // divide up space into block of 360
       double delta = (high - low) / 360;
-      int block = random().nextInt(360);
+      int block = randomInt(360);
       baseValue = low + delta * block;
     } else {
       // distributed ~ evenly
-      baseValue = low + (high - low) * random().nextDouble();
+      baseValue = low + (high - low) * randomDouble();
     }
 
     assert baseValue >= low;
     assert baseValue <= high;
 
     // either return the base value or adjust it by 1 ulp in a random direction (if possible)
-    int adjustMe = random().nextInt(17);
+    int adjustMe = randomInt(17);
     if (adjustMe == 0) {
       return Math.nextAfter(adjustMe, high);
     } else if (adjustMe == 1) {
@@ -106,7 +109,7 @@ public class GeoTestUtil {
   private static double nextLatitudeNear(double otherLatitude, double delta) {
     delta = Math.abs(delta);
     GeoUtils.checkLatitude(otherLatitude);
-    int surpriseMe = random().nextInt(97);
+    int surpriseMe = randomInt(97);
     if (surpriseMe == 0) {
       // purely random
       return nextLatitude();
@@ -123,7 +126,7 @@ public class GeoTestUtil {
   private static double nextLongitudeNear(double otherLongitude, double delta) {
     delta = Math.abs(delta);
     GeoUtils.checkLongitude(otherLongitude);
-    int surpriseMe = random().nextInt(97);
+    int surpriseMe = randomInt(97);
     if (surpriseMe == 0) {
       // purely random
       return nextLongitude();
@@ -145,7 +148,7 @@ public class GeoTestUtil {
     assert maxLatitude >= minLatitude;
     GeoUtils.checkLatitude(minLatitude);
     GeoUtils.checkLatitude(maxLatitude);
-    if (random().nextInt(47) == 0) {
+    if (randomInt(47) == 0) {
       // purely random
       return nextLatitude();
     } else {
@@ -166,7 +169,7 @@ public class GeoTestUtil {
     assert maxLongitude >= minLongitude;
     GeoUtils.checkLongitude(minLongitude);
     GeoUtils.checkLongitude(maxLongitude);
-    if (random().nextInt(47) == 0) {
+    if (randomInt(47) == 0) {
       // purely random
       return nextLongitude();
     } else {
@@ -211,7 +214,7 @@ public class GeoTestUtil {
   public static double[] nextPointNear(Rectangle rectangle) {
     if (rectangle.crossesDateline()) {
       // pick a "side" of the two boxes we really are
-      if (random().nextBoolean()) {
+      if (randomBoolean()) {
         return nextPointNear(new Rectangle(rectangle.minLat, rectangle.maxLat, -180, rectangle.maxLon));
       } else {
         return nextPointNear(new Rectangle(rectangle.minLat, rectangle.maxLat, rectangle.minLon, 180));
@@ -229,11 +232,11 @@ public class GeoTestUtil {
     Polygon holes[] = polygon.getHoles();
 
     // if there are any holes, target them aggressively
-    if (holes.length > 0 && random().nextInt(3) == 0) {
-      return nextPointNear(holes[random().nextInt(holes.length)]);
+    if (holes.length > 0 && randomInt(3) == 0) {
+      return nextPointNear(holes[randomInt(holes.length)]);
     }
 
-    int surpriseMe = random().nextInt(97);
+    int surpriseMe = randomInt(97);
     if (surpriseMe == 0) {
       // purely random
       return new double[] { nextLatitude(), nextLongitude() };
@@ -242,7 +245,7 @@ public class GeoTestUtil {
       return new double[] { nextLatitudeBetween(polygon.minLat, polygon.maxLat), nextLongitudeBetween(polygon.minLon, polygon.maxLon) };
     } else if (surpriseMe < 20) {
       // target a vertex
-      int vertex = random().nextInt(polyLats.length - 1);
+      int vertex = randomInt(polyLats.length - 1);
       return new double[] { nextLatitudeNear(polyLats[vertex], polyLats[vertex+1] - polyLats[vertex]), 
                             nextLongitudeNear(polyLons[vertex], polyLons[vertex+1] - polyLons[vertex]) };
     } else if (surpriseMe < 30) {
@@ -250,14 +253,14 @@ public class GeoTestUtil {
       Polygon container = boxPolygon(new Rectangle(polygon.minLat, polygon.maxLat, polygon.minLon, polygon.maxLon));
       double containerLats[] = container.getPolyLats();
       double containerLons[] = container.getPolyLons();
-      int startVertex = random().nextInt(containerLats.length - 1);
+      int startVertex = randomInt(containerLats.length - 1);
       return nextPointAroundLine(containerLats[startVertex], containerLons[startVertex], 
                                  containerLats[startVertex+1], containerLons[startVertex+1]);
     } else {
       // target points around diagonals between vertices
-      int startVertex = random().nextInt(polyLats.length - 1);
+      int startVertex = randomInt(polyLats.length - 1);
       // but favor edges heavily
-      int endVertex = random().nextBoolean() ? startVertex + 1 : random().nextInt(polyLats.length - 1);
+      int endVertex = randomBoolean() ? startVertex + 1 : randomInt(polyLats.length - 1);
       return nextPointAroundLine(polyLats[startVertex], polyLons[startVertex], 
                                  polyLats[endVertex],   polyLons[endVertex]);
     }
@@ -270,11 +273,11 @@ public class GeoTestUtil {
     
     // if there are any holes, target them aggressively
     Polygon holes[] = polygon.getHoles();
-    if (holes.length > 0 && random().nextInt(3) == 0) {
-      return nextBoxNear(holes[random().nextInt(holes.length)]);
+    if (holes.length > 0 && randomInt(3) == 0) {
+      return nextBoxNear(holes[randomInt(holes.length)]);
     }
     
-    int surpriseMe = random().nextInt(97);
+    int surpriseMe = randomInt(97);
     if (surpriseMe == 0) {
       // formed from two interesting points
       point1 = nextPointNear(polygon);
@@ -286,7 +289,7 @@ public class GeoTestUtil {
       // now figure out a good delta: we use a rough heuristic, up to the length of an edge
       double polyLats[] = polygon.getPolyLats();
       double polyLons[] = polygon.getPolyLons();
-      int vertex = random().nextInt(polyLats.length - 1);
+      int vertex = randomInt(polyLats.length - 1);
       double deltaX = polyLons[vertex+1] - polyLons[vertex];
       double deltaY = polyLats[vertex+1] - polyLats[vertex];
       double edgeLength = Math.sqrt(deltaX * deltaX + deltaY * deltaY);
@@ -386,14 +389,14 @@ public class GeoTestUtil {
 
   /** returns next pseudorandom polygon */
   public static Polygon nextPolygon() {
-    if (random().nextBoolean()) {
+    if (randomBoolean()) {
       return surpriseMePolygon();
-    } else if (random().nextInt(10) == 1) {
+    } else if (randomInt(10) == 1) {
       // this poly is slow to create ... only do it 10% of the time:
       while (true) {
-        int gons = TestUtil.nextInt(random(), 4, 500);
+        int gons = randomIntBetween(4, 500);
         // So the poly can cover at most 50% of the earth's surface:
-        double radiusMeters = random().nextDouble() * GeoUtils.EARTH_MEAN_RADIUS_METERS * Math.PI / 2.0 + 1.0;
+        double radiusMeters = randomDouble() * GeoUtils.EARTH_MEAN_RADIUS_METERS * Math.PI / 2.0 + 1.0;
         try {
           return createRegularPolygon(nextLatitude(), nextLongitude(), radiusMeters, gons);
         } catch (IllegalArgumentException iae) {
@@ -403,7 +406,7 @@ public class GeoTestUtil {
     }
 
     Rectangle box = nextBoxInternal(false);
-    if (random().nextBoolean()) {
+    if (randomBoolean()) {
       // box
       return boxPolygon(box);
     } else {
@@ -480,19 +483,19 @@ public class GeoTestUtil {
       //System.out.println("\nPOLY ITER");
       double centerLat = nextLatitude();
       double centerLon = nextLongitude();
-      double radius = 0.1 + 20 * random().nextDouble();
-      double radiusDelta = random().nextDouble();
+      double radius = 0.1 + 20 * randomDouble();
+      double radiusDelta = randomDouble();
 
       ArrayList<Double> lats = new ArrayList<>();
       ArrayList<Double> lons = new ArrayList<>();
       double angle = 0.0;
       while (true) {
-        angle += random().nextDouble()*40.0;
+        angle += randomDouble() * 40.0;
         //System.out.println("  angle " + angle);
         if (angle > 360) {
           break;
         }
-        double len = radius * (1.0 - radiusDelta + radiusDelta * random().nextDouble());
+        double len = radius * (1.0 - radiusDelta + radiusDelta * randomDouble());
         //System.out.println("    len=" + len);
         double lat = centerLat + len * Math.cos(SloppyMath.toRadians(angle));
         double lon = centerLon + len * Math.sin(SloppyMath.toRadians(angle));
@@ -521,11 +524,6 @@ public class GeoTestUtil {
     }
   }
 
-  /** Keep it simple, we don't need to take arbitrary Random for geo tests */
-  private static Random random() {
-   return RandomizedContext.current().getRandom();
-  }
-
   /** 
    * Returns svg of polygon for debugging. 
    * <p>


[08/48] lucene-solr:jira/http2: SOLR-11870: Ref Guide: Add docs on filter param for ICU filters

Posted by da...@apache.org.
SOLR-11870: Ref Guide: Add docs on filter param for ICU filters


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/13960594
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/13960594
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/13960594

Branch: refs/heads/jira/http2
Commit: 13960594e4785520a4cc674c7fe4f00df4712b9b
Parents: ecad919
Author: Cassandra Targett <ct...@apache.org>
Authored: Tue Jul 31 13:17:14 2018 -0500
Committer: Cassandra Targett <ct...@apache.org>
Committed: Tue Jul 31 13:17:14 2018 -0500

----------------------------------------------------------------------
 .../solr-ref-guide/src/filter-descriptions.adoc | 50 +++++++++++++++-----
 1 file changed, 37 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/13960594/solr/solr-ref-guide/src/filter-descriptions.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/filter-descriptions.adoc b/solr/solr-ref-guide/src/filter-descriptions.adoc
index 95e83b6..f517901 100644
--- a/solr/solr-ref-guide/src/filter-descriptions.adoc
+++ b/solr/solr-ref-guide/src/filter-descriptions.adoc
@@ -469,15 +469,17 @@ Note that for this filter to work properly, the upstream tokenizer must not remo
 
 == ICU Folding Filter
 
-This filter is a custom Unicode normalization form that applies the foldings specified in http://www.unicode.org/reports/tr30/tr30-4.html[Unicode Technical Report 30] in addition to the `NFKC_Casefold` normalization form as described in <<ICU Normalizer 2 Filter>>. This filter is a better substitute for the combined behavior of the <<ASCII Folding Filter>>, <<Lower Case Filter>>, and <<ICU Normalizer 2 Filter>>.
+This filter applies a custom Unicode normalization form, adding the foldings specified in http://www.unicode.org/reports/tr30/tr30-4.html[Unicode TR #30: Character Foldings] to the `NFKC_Casefold` normalization form described in <<ICU Normalizer 2 Filter>>. This filter is a better substitute for the combined behavior of the <<ASCII Folding Filter>>, <<Lower Case Filter>>, and <<ICU Normalizer 2 Filter>>.
 
 To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`. For more information about adding jars, see the section <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in Solrconfig>>.
 
 *Factory class:* `solr.ICUFoldingFilterFactory`
 
-*Arguments:* None
+*Arguments:*
 
-*Example:*
+`filter`:: (string, optional) A Unicode set filter that can be used to, for example, exclude a set of characters from being processed. See the http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet javadocs] for more information.
+
+*Example without a filter:*
 
 [source,xml]
 ----
@@ -487,27 +489,39 @@ To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructio
 </analyzer>
 ----
 
-For detailed information on this normalization form, see http://www.unicode.org/reports/tr30/tr30-4.html.
+*Example with a filter to exclude Swedish/Finnish characters:*
+
+[source,xml]
+----
+<analyzer>
+  <tokenizer class="solr.StandardTokenizerFactory"/>
+  <filter class="solr.ICUFoldingFilterFactory" filter="[^åäöÅÄÖ]"/>
+</analyzer>
+----
+
+For detailed information on this normalization form, see http://www.unicode.org/reports/tr30/tr30-4.html[Unicode TR #30: Character Foldings].
 
 == ICU Normalizer 2 Filter
 
 This filter factory normalizes text according to one of five Unicode Normalization Forms as described in http://unicode.org/reports/tr15/[Unicode Standard Annex #15]:
 
-* NFC: (name="nfc" mode="compose") Normalization Form C, canonical decomposition
-* NFD: (name="nfc" mode="decompose") Normalization Form D, canonical decomposition, followed by canonical composition
-* NFKC: (name="nfkc" mode="compose") Normalization Form KC, compatibility decomposition
-* NFKD: (name="nfkc" mode="decompose") Normalization Form KD, compatibility decomposition, followed by canonical composition
-* NFKC_Casefold: (name="nfkc_cf" mode="compose") Normalization Form KC, with additional Unicode case folding. Using the ICU Normalizer 2 Filter is a better-performing substitution for the <<Lower Case Filter>> and NFKC normalization.
+* NFC: (`name="nfc" mode="compose"`) Normalization Form C, canonical decomposition, followed by canonical composition
+* NFD: (`name="nfc" mode="decompose"`) Normalization Form D, canonical decomposition
+* NFKC: (`name="nfkc" mode="compose"`) Normalization Form KC, compatibility decomposition, followed by canonical composition
+* NFKD: (`name="nfkc" mode="decompose"`) Normalization Form KD, compatibility decomposition
+* NFKC_Casefold: (`name="nfkc_cf" mode="compose"`) Normalization Form KC, with additional Unicode case folding. Using the ICU Normalizer 2 Filter is a better-performing substitute for the <<Lower Case Filter>> and NFKC normalization.
 
 *Factory class:* `solr.ICUNormalizer2FilterFactory`
 
 *Arguments:*
 
-`name`:: (string) The name of the normalization form; `nfc`, `nfd`, `nfkc`, `nfkd`, `nfkc_cf`
+`name`:: The name of the normalization form. Valid options are `nfc`, `nfd`, `nfkc`, `nfkd`, or `nfkc_cf`. Optional; defaults to `nfkc_cf`.
 
-`mode`:: (string) The mode of Unicode character composition and decomposition; `compose` or `decompose`
+`mode`:: The mode of Unicode character composition and decomposition. Valid options are `compose` or `decompose`. Optional; defaults to `compose`.
 
-*Example:*
+`filter`:: A Unicode set filter that can be used to, for example, exclude a set of characters from being processed. See the http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet javadocs] for more information. Optional.
+
+*Example with NFKC_Casefold:*
 
 [source,xml]
 ----
@@ -517,7 +531,17 @@ This filter factory normalizes text according to one of five Unicode Normalizati
 </analyzer>
 ----
 
-For detailed information about these Unicode Normalization Forms, see http://unicode.org/reports/tr15/.
+*Example with a filter to exclude Swedish/Finnish characters:*
+
+[source,xml]
+----
+<analyzer>
+  <tokenizer class="solr.StandardTokenizerFactory"/>
+  <filter class="solr.ICUNormalizer2FilterFactory" name="nfkc_cf" mode="compose" filter="[^åäöÅÄÖ]"/>
+</analyzer>
+----
+
+For detailed information about these normalization forms, see http://unicode.org/reports/tr15/[Unicode Normalization Forms].
 
 To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`.
 


[24/48] lucene-solr:jira/http2: SOLR-12509: Improve SplitShardCmd performance and reliability.

Posted by da...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
index ae743da..79eccd9 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
@@ -34,9 +34,11 @@ import org.apache.solr.common.cloud.CompositeIdRouter;
 import org.apache.solr.common.cloud.DocRouter;
 import org.apache.solr.common.cloud.PlainIdRouter;
 import org.apache.solr.common.util.Hash;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.LocalSolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -45,13 +47,15 @@ import org.slf4j.LoggerFactory;
 
 public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
+
   File indexDir1 = null, indexDir2 = null, indexDir3 = null;
 
   @BeforeClass
   public static void beforeClass() throws Exception {
-    System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
-    initCore("solrconfig.xml", "schema12.xml");
+   // System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
+    System.setProperty("solr.directoryFactory", "solr.NRTCachingDirectoryFactory");
+    System.setProperty("solr.tests.lockType", DirectoryFactory.LOCK_TYPE_SIMPLE);
+    initCore("solrconfig.xml", "schema15.xml");
   }
 
   @Override
@@ -67,6 +71,15 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testSplitByPaths() throws Exception {
+    doTestSplitByPaths(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitByPathsLink() throws Exception {
+    doTestSplitByPaths(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doTestSplitByPaths(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     LocalSolrQueryRequest request = null;
     try {
       // add two docs
@@ -81,10 +94,10 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
       List<DocRouter.Range> ranges = getRanges(id1, id2);
 
       request = lrf.makeRequest("q", "dummy");
-
-      SplitIndexCommand command = new SplitIndexCommand(request,
-          Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()), null, ranges, new PlainIdRouter(), null, null);
-      new SolrIndexSplitter(command).split();
+      SolrQueryResponse rsp = new SolrQueryResponse();
+      SplitIndexCommand command = new SplitIndexCommand(request, rsp,
+          Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()), null, ranges, new PlainIdRouter(), null, null, splitMethod);
+      doSplit(command);
 
       Directory directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
@@ -106,9 +119,23 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
       if (request != null) request.close(); // decrefs the searcher
     }
   }
+
+  private void doSplit(SplitIndexCommand command) throws Exception {
+    NamedList<Object> results = new NamedList<>();
+    new SolrIndexSplitter(command).split(results);
+    command.rsp.addResponse(results);
+  }
   
   // SOLR-5144
   public void testSplitDeletes() throws Exception {
+    doTestSplitDeletes(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  public void testSplitDeletesLink() throws Exception {
+    doTestSplitDeletes(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doTestSplitDeletes(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     LocalSolrQueryRequest request = null;
     try {
       // add two docs
@@ -126,10 +153,11 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
       List<DocRouter.Range> ranges = getRanges(id1, id2);
 
       request = lrf.makeRequest("q", "dummy");
+      SolrQueryResponse rsp = new SolrQueryResponse();
 
-      SplitIndexCommand command = new SplitIndexCommand(request,
-          Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()), null, ranges, new PlainIdRouter(), null, null);
-      new SolrIndexSplitter(command).split();
+      SplitIndexCommand command = new SplitIndexCommand(request, rsp,
+          Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()), null, ranges, new PlainIdRouter(), null, null, splitMethod);
+      doSplit(command);
 
       Directory directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
@@ -152,11 +180,25 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testSplitByCores() throws Exception {
-    // add two docs
+    doTestSplitByCores(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitByCoresLink() throws Exception {
+    doTestSplitByCores(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doTestSplitByCores(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
+    // add three docs and 1 delete
     String id1 = "dorothy";
     assertU(adoc("id", id1));
     String id2 = "kansas";
     assertU(adoc("id", id2));
+    String id3 = "wizard";
+    assertU(adoc("id", id3));
+    assertU(commit());
+    assertJQ(req("q", "*:*"), "/response/numFound==3");
+    assertU(delI("wizard"));
     assertU(commit());
     assertJQ(req("q", "*:*"), "/response/numFound==2");
     List<DocRouter.Range> ranges = getRanges(id1, id2);
@@ -165,16 +207,17 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
     try {
 
       core1 = h.getCoreContainer().create("split1",
-          ImmutableMap.of("dataDir", indexDir1.getAbsolutePath(), "configSet", "minimal"));
+          ImmutableMap.of("dataDir", indexDir1.getAbsolutePath(), "configSet", "cloud-minimal"));
       core2 = h.getCoreContainer().create("split2",
-          ImmutableMap.of("dataDir", indexDir2.getAbsolutePath(), "configSet", "minimal"));
+          ImmutableMap.of("dataDir", indexDir2.getAbsolutePath(), "configSet", "cloud-minimal"));
 
       LocalSolrQueryRequest request = null;
       try {
         request = lrf.makeRequest("q", "dummy");
-
-        SplitIndexCommand command = new SplitIndexCommand(request, null, Lists.newArrayList(core1, core2), ranges, new PlainIdRouter(), null, null);
-        new SolrIndexSplitter(command).split();
+        SolrQueryResponse rsp = new SolrQueryResponse();
+        SplitIndexCommand command = new SplitIndexCommand(request, rsp, null, Lists.newArrayList(core1, core2), ranges,
+            new PlainIdRouter(), null, null, splitMethod);
+        doSplit(command);
       } finally {
         if (request != null) request.close();
       }
@@ -196,6 +239,15 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testSplitAlternately() throws Exception {
+    doTestSplitAlternately(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitAlternatelyLink() throws Exception {
+    doTestSplitAlternately(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doTestSplitAlternately(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     LocalSolrQueryRequest request = null;
     Directory directory = null;
     try {
@@ -208,10 +260,11 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
       assertU(commit());
 
       request = lrf.makeRequest("q", "dummy");
-
-      SplitIndexCommand command = new SplitIndexCommand(request,
-          Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath(), indexDir3.getAbsolutePath()), null, null, new PlainIdRouter(), null, null);
-      new SolrIndexSplitter(command).split();
+      SolrQueryResponse rsp = new SolrQueryResponse();
+      SplitIndexCommand command = new SplitIndexCommand(request, rsp,
+          Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath(), indexDir3.getAbsolutePath()),
+          null, null, new PlainIdRouter(), null, null, splitMethod);
+      doSplit(command);
 
       directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
@@ -242,7 +295,16 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
   }
 
   @Test
-  public void testSplitByRouteKey() throws Exception  {
+  public void testSplitByRouteKey() throws Exception {
+    doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod.REWRITE);
+  }
+
+  @Test
+  public void testSplitByRouteKeyLink() throws Exception  {
+    doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod.LINK);
+  }
+
+  private void doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod splitMethod) throws Exception  {
     File indexDir = createTempDir().toFile();
 
     CompositeIdRouter r1 = new CompositeIdRouter();
@@ -274,9 +336,11 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
     Directory directory = null;
     try {
       request = lrf.makeRequest("q", "dummy");
-      SplitIndexCommand command = new SplitIndexCommand(request,
-          Lists.newArrayList(indexDir.getAbsolutePath()), null, Lists.newArrayList(splitKeyRange), new CompositeIdRouter(), null, splitKey);
-      new SolrIndexSplitter(command).split();
+      SolrQueryResponse rsp = new SolrQueryResponse();
+      SplitIndexCommand command = new SplitIndexCommand(request, rsp,
+          Lists.newArrayList(indexDir.getAbsolutePath()), null, Lists.newArrayList(splitKeyRange),
+          new CompositeIdRouter(), null, splitKey, splitMethod);
+      doSplit(command);
       directory = h.getCore().getDirectoryFactory().get(indexDir.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
       DirectoryReader reader = DirectoryReader.open(directory);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/solr-ref-guide/src/collections-api.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/collections-api.adoc b/solr/solr-ref-guide/src/collections-api.adoc
index b94509d..80ce941 100644
--- a/solr/solr-ref-guide/src/collections-api.adoc
+++ b/solr/solr-ref-guide/src/collections-api.adoc
@@ -259,9 +259,11 @@ This command allows for seamless splitting and requires no downtime. A shard bei
 
 The split is performed by dividing the original shard's hash range into two equal partitions and dividing up the documents in the original shard according to the new sub-ranges. Two parameters discussed below, `ranges` and `split.key` provide further control over how the split occurs.
 
-The newly created shards will have as many replicas as the parent shard.
+The newly created shards will have as many replicas as the parent shard, of the same replica types.
 
-You must ensure that the node running the leader of the parent shard has enough free disk space i.e., more than twice the index size, for the split to succeed. The API uses the Autoscaling framework to find nodes that can satisfy the disk requirements for the new replicas but only when an Autoscaling policy is configured. Refer to <<solrcloud-autoscaling-policy-preferences.adoc#solrcloud-autoscaling-policy-preferences,Autoscaling Policy and Preferences>> section for more details.
+When using `splitMethod=rewrite` (the default), you must ensure that the node running the leader of the parent shard has enough free disk space, i.e., more than twice the index size, for the split to succeed. The API uses the Autoscaling framework to find nodes that can satisfy the disk requirements for the new replicas, but only when an Autoscaling policy is configured. Refer to the <<solrcloud-autoscaling-policy-preferences.adoc#solrcloud-autoscaling-policy-preferences,Autoscaling Policy and Preferences>> section for more details.
+
+Also, the first replicas of the resulting sub-shards will always be placed on the shard leader node, which may cause Autoscaling policy violations that need to be resolved either automatically (when appropriate triggers are in use) or manually.
 
 Shard splitting can be a long running process. In order to avoid timeouts, you should run this as an <<Asynchronous Calls,asynchronous call>>.
 
@@ -285,12 +287,26 @@ This parameter can be used to split a shard using a route key such that all docu
 +
 For example, suppose `split.key=A!` hashes to the range `12-15` and belongs to shard 'shard1' with range `0-20`. Splitting by this route key would yield three sub-shards with ranges `0-11`, `12-15` and `16-20`. Note that the sub-shard with the hash range of the route key may also contain documents for other route keys whose hash ranges overlap.
 
+`splitMethod`::
+Currently two methods of shard splitting are supported:
+* `splitMethod=rewrite` (default): after selecting the documents to retain in each partition, this method creates sub-indexes from
+scratch, which is a lengthy, CPU- and I/O-intensive process but results in optimally-sized sub-indexes that don't contain
+any data from documents not belonging to the partition.
+* `splitMethod=link`: uses file system-level hard links to create copies of the original index files and then only modifies the
+file that contains the list of deleted documents in each partition. This method is many times quicker and lighter on resources than the
+`rewrite` method, but the resulting sub-indexes are still as large as the original index because they still contain data from documents not
+belonging to the partition. This slows down the replication process and consumes more disk space on replica nodes (the multiple hard-linked
+copies don't occupy additional disk space on the leader node, unless hard-linking is not supported).
+
 `property._name_=_value_`::
 Set core property _name_ to _value_. See the section <<defining-core-properties.adoc#defining-core-properties,Defining core.properties>> for details on supported properties and values.
 
 `waitForFinalState`::
 If `true`, the request will complete only when all affected replicas become active. The default is `false`, which means that the API will return the status of the single action, which may be before the new replica is online and active.
 
+`timing`::
+If `true`, then each stage of processing will be timed and a `timing` section will be included in the response.
+
 `async`::
 Request ID to track this action which will be <<Asynchronous Calls,processed asynchronously>>
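
A minimal SolrJ sketch of the new `splitMethod` parameter documented above; it assumes an existing CloudSolrClient named client, and the collection ("techproducts"), shard ("shard1"), and request ID are hypothetical placeholders:

import org.apache.solr.client.solrj.request.CollectionAdminRequest;

// Split shard1 with the hard-link method, and track the long-running
// operation asynchronously as the documentation above recommends.
CollectionAdminRequest.SplitShard split = CollectionAdminRequest
    .splitShard("techproducts")
    .setShardName("shard1")
    .setSplitMethod("link"); // "rewrite" (the default) or "link"
split.processAsync("split-shard1-req", client);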
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 8d36296..50cd65d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -1144,6 +1144,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected String ranges;
     protected String splitKey;
     protected String shard;
+    protected String splitMethod;
 
     private Properties properties;
 
@@ -1155,6 +1156,15 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     public SplitShard setRanges(String ranges) { this.ranges = ranges; return this; }
     public String getRanges() { return ranges; }
 
+    public SplitShard setSplitMethod(String splitMethod) {
+      this.splitMethod = splitMethod;
+      return this;
+    }
+
+    public String getSplitMethod() {
+      return splitMethod;
+    }
+
     public SplitShard setSplitKey(String splitKey) {
       this.splitKey = splitKey;
       return this;
@@ -1191,6 +1201,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       params.set(CoreAdminParams.SHARD, shard);
       params.set("split.key", this.splitKey);
       params.set(CoreAdminParams.RANGES, ranges);
+      params.set(CommonAdminParams.SPLIT_METHOD, splitMethod);
 
       if(properties != null) {
         addProperties(params, properties);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/solrj/src/java/org/apache/solr/common/params/CommonAdminParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonAdminParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonAdminParams.java
index c39b4a8..c12ee32 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonAdminParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonAdminParams.java
@@ -25,6 +25,8 @@ public interface CommonAdminParams
   String WAIT_FOR_FINAL_STATE = "waitForFinalState";
   /** Allow in-place move of replicas that use shared filesystems. */
   String IN_PLACE_MOVE = "inPlaceMove";
+  /** Method to use for shard splitting. */
+  String SPLIT_METHOD = "splitMethod";
   /** Timeout for replicas to become active. */
   String TIMEOUT = "timeout";
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1133bf98/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java
index 58c39a3..9103450 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java
@@ -84,7 +84,7 @@ public abstract class CoreAdminParams
 
   /** The hash ranges to be used to split a shard or an index */
   public final static String RANGES = "ranges";
-  
+
   public static final String ROLES = "roles";
 
   public static final String REQUESTID = "requestid";


[46/48] lucene-solr:jira/http2: Fix ReqOptSumScorer to not advance backwards.

Posted by da...@apache.org.
Fix ReqOptSumScorer to not advance backwards.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fb7fce86
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fb7fce86
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fb7fce86

Branch: refs/heads/jira/http2
Commit: fb7fce862390733ace76d2fcb485d71f90a28edd
Parents: ffedb99
Author: Adrien Grand <jp...@gmail.com>
Authored: Sun Aug 5 22:40:43 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Sun Aug 5 22:40:43 2018 +0200

----------------------------------------------------------------------
 .../core/src/java/org/apache/lucene/search/ReqOptSumScorer.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
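
A minimal annotated sketch of the forward-only contract behind this fix; optScorer, reqScorer, and target stand in for the fields and argument shown in the patch below:

@Override
public int advanceShallow(int target) throws IOException {
  // Scorer.advanceShallow(target) may only move forward. The optional scorer
  // can already be positioned at or beyond target, in which case calling it
  // again would ask it to advance backwards; guard the call instead.
  if (optScorer.docID() < target) {
    optScorer.advanceShallow(target);
  }
  // The required scorer drives iteration, so forwarding is always safe here.
  return reqScorer.advanceShallow(target);
}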


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fb7fce86/lucene/core/src/java/org/apache/lucene/search/ReqOptSumScorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/ReqOptSumScorer.java b/lucene/core/src/java/org/apache/lucene/search/ReqOptSumScorer.java
index 418ad99..2463e6a 100644
--- a/lucene/core/src/java/org/apache/lucene/search/ReqOptSumScorer.java
+++ b/lucene/core/src/java/org/apache/lucene/search/ReqOptSumScorer.java
@@ -212,7 +212,9 @@ class ReqOptSumScorer extends Scorer {
 
   @Override
   public int advanceShallow(int target) throws IOException {
-    optScorer.advanceShallow(target);
+    if (optScorer.docID() < target) {
+      optScorer.advanceShallow(target);
+    }
     return reqScorer.advanceShallow(target);
   }
 


[09/48] lucene-solr:jira/http2: SOLR-12402: Factor out SolrDefaultStreamFactory class.

Posted by da...@apache.org.
SOLR-12402: Factor out SolrDefaultStreamFactory class.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4a1ee046
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4a1ee046
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4a1ee046

Branch: refs/heads/jira/http2
Commit: 4a1ee046c37615b3927737618c9a4c937bd6ede9
Parents: 1396059
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 31 18:47:53 2018 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Jul 31 19:29:43 2018 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../solr/handler/SolrDefaultStreamFactory.java  | 53 ++++++++++++++++++++
 .../org/apache/solr/handler/StreamHandler.java  | 11 +---
 3 files changed, 57 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a1ee046/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2d8db8c..37dd5a7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -129,6 +129,8 @@ New Features
   future cluster operations whether they are invoked manually via the Collection API or by the Autoscaling framework.
   (noble, shalin)
 
+* SOLR-12402: Factor out SolrDefaultStreamFactory class. (Christine Poerschke)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a1ee046/solr/core/src/java/org/apache/solr/handler/SolrDefaultStreamFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrDefaultStreamFactory.java b/solr/core/src/java/org/apache/solr/handler/SolrDefaultStreamFactory.java
new file mode 100644
index 0000000..0b375f4
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/SolrDefaultStreamFactory.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler;
+
+import org.apache.solr.client.solrj.io.Lang;
+import org.apache.solr.client.solrj.io.stream.expr.DefaultStreamFactory;
+import org.apache.solr.core.SolrResourceLoader;
+
+/**
+ * A default collection of mappings, used to convert strings into stream expressions.
+ * Same as {@link DefaultStreamFactory} plus functions that rely directly on either
+ * Lucene or Solr capabilities that are not part of {@link Lang}.
+ *
+ * @since 7.5
+ */
+public class SolrDefaultStreamFactory extends DefaultStreamFactory {
+
+  private SolrResourceLoader solrResourceLoader;
+
+  public SolrDefaultStreamFactory() {
+    super();
+    this.withFunctionName("analyze",  AnalyzeEvaluator.class);
+    this.withFunctionName("classify", ClassifyStream.class);
+  }
+
+  public SolrDefaultStreamFactory withSolrResourceLoader(SolrResourceLoader solrResourceLoader) {
+    this.solrResourceLoader = solrResourceLoader;
+    return this;
+  }
+
+  public void setSolrResourceLoader(SolrResourceLoader solrResourceLoader) {
+    this.solrResourceLoader = solrResourceLoader;
+  }
+
+  public SolrResourceLoader getSolrResourceLoader() {
+    return solrResourceLoader;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a1ee046/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 4e43e1c..e6ebc51 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -31,7 +31,6 @@ import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
 import org.apache.solr.client.solrj.io.stream.*;
-import org.apache.solr.client.solrj.io.stream.expr.DefaultStreamFactory;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.client.solrj.io.stream.expr.Expressible;
@@ -65,7 +64,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
 
   static SolrClientCache clientCache = new SolrClientCache();
   static ModelCache modelCache = null;
-  private StreamFactory streamFactory = new DefaultStreamFactory();
+  private SolrDefaultStreamFactory streamFactory = new SolrDefaultStreamFactory();
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private String coreName;
   private Map<String,DaemonStream> daemons = Collections.synchronizedMap(new HashMap());
@@ -105,13 +104,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
           defaultZkhost,
           clientCache);
     }
-
-    /*
-    * Add the core functions. These are functions that rely directly on either Lucene or Solr
-    * capabilities that are not part of Lang.
-    */
-    streamFactory.withFunctionName("analyze",  AnalyzeEvaluator.class);
-    streamFactory.withFunctionName("classify", ClassifyStream.class);
+    streamFactory.withSolrResourceLoader(core.getResourceLoader());
 
     // This pulls all the overrides and additions from the config
     List<PluginInfo> pluginInfos = core.getSolrConfig().getPluginInfos(Expressible.class.getName());
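
A minimal sketch of extending the newly factored-out class, assuming a hypothetical Expressible implementation MyStream; the subclass and function names are illustrative only:

import org.apache.solr.handler.SolrDefaultStreamFactory;

public class MyStreamFactory extends SolrDefaultStreamFactory {
  public MyStreamFactory() {
    super(); // registers "analyze" and "classify" on top of the Lang defaults
    // MyStream is a hypothetical Expressible; substitute a real implementation.
    this.withFunctionName("myFunction", MyStream.class);
  }
}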


[19/48] lucene-solr:jira/http2: LUCENE-8060: ToChildBlockJoinQuery can't delegate getMaxScore but not advanceShallow.

Posted by da...@apache.org.
LUCENE-8060: ToChildBlockJoinQuery can't delegate getMaxScore but not advanceShallow.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/11630916
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/11630916
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/11630916

Branch: refs/heads/jira/http2
Commit: 116309160e3e4c9df37f60e077126f58bd7e3e8e
Parents: 3203e99
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 14:33:56 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 14:42:37 2018 +0200

----------------------------------------------------------------------
 .../java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/11630916/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
index 9de319b..7d9668c 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
@@ -282,7 +282,7 @@ public class ToChildBlockJoinQuery extends Query {
 
     @Override
     public float getMaxScore(int upTo) throws IOException {
-      return parentScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS);
+      return Float.POSITIVE_INFINITY;
     }
     
     int getParentDoc() {


[22/48] lucene-solr:jira/http2: Revert "Make the nightly test smaller so that it does not fail with GC overhead exceeded (OOM). Clean up random number fetching to make it shorter."

Posted by da...@apache.org.
Revert "Make the nightly test smaller so that it does not fail with GC overhead exceeded (OOM). Clean up random number fetching to make it shorter."

This reverts commit 3203e99d8fbcaac3458fcf882d4ec229f97dfa43.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e56c8722
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e56c8722
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e56c8722

Branch: refs/heads/jira/http2
Commit: e56c8722ce99338e980b32e100b96f2c19af9ddf
Parents: 86a39fa
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 15:44:57 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 15:44:57 2018 +0200

----------------------------------------------------------------------
 .../lucene/document/TestLatLonShapeQueries.java | 15 ++---
 .../java/org/apache/lucene/geo/GeoTestUtil.java | 70 ++++++++++----------
 2 files changed, 42 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e56c8722/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
index 21d4e83..03941b9 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
@@ -45,8 +45,6 @@ import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
 import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
 import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
 import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
@@ -106,7 +104,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
   @Nightly
   public void testRandomBig() throws Exception {
-    doTestRandom(50000);
+    doTestRandom(200000);
   }
 
   private void doTestRandom(int count) throws Exception {
@@ -118,7 +116,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
     Polygon[] polygons = new Polygon[numPolygons];
     for (int id = 0; id < numPolygons; ++id) {
-      int x = randomInt(20);
+      int x = random().nextInt(20);
       if (x == 17) {
         polygons[id] = null;
         if (VERBOSE) {
@@ -129,7 +127,6 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
         polygons[id] = GeoTestUtil.nextPolygon();
       }
     }
-
     verify(polygons);
   }
 
@@ -176,8 +173,8 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
         poly2D[id] = Polygon2D.create(quantizePolygon(polygons[id]));
       }
       w.addDocument(doc);
-      if (id > 0 && randomInt(100) == 42) {
-        int idToDelete = randomInt(id);
+      if (id > 0 && random().nextInt(100) == 42) {
+        int idToDelete = random().nextInt(id);
         w.deleteDocuments(new Term("id", ""+idToDelete));
         deleted.add(idToDelete);
         if (VERBOSE) {
@@ -186,7 +183,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
       }
     }
 
-    if (randomBoolean()) {
+    if (random().nextBoolean()) {
       w.forceMerge(1);
     }
     final IndexReader r = DirectoryReader.open(w);
@@ -201,7 +198,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
     for (int iter = 0; iter < iters; ++iter) {
       if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + (iter + 1) + " of " + iters + " s=" + s);
+        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
       }
 
       // BBox

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e56c8722/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
index 8817d20..fe81fd6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
@@ -19,16 +19,13 @@ package org.apache.lucene.geo;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Random;
 
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.SloppyMath;
 import org.apache.lucene.util.TestUtil;
 
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
-import static org.apache.lucene.util.LuceneTestCase.random;
+import com.carrotsearch.randomizedtesting.RandomizedContext;
 
 /** static methods for testing geo */
 public class GeoTestUtil {
@@ -66,7 +63,7 @@ public class GeoTestUtil {
 
     // first pick a base value.
     final double baseValue;
-    int surpriseMe = randomInt(17);
+    int surpriseMe = random().nextInt(17);
     if (surpriseMe == 0) {
       // random bits
       long lowBits = NumericUtils.doubleToSortableLong(low);
@@ -84,18 +81,18 @@ public class GeoTestUtil {
     } else if (surpriseMe == 4) {
       // divide up space into block of 360
       double delta = (high - low) / 360;
-      int block = randomInt(360);
+      int block = random().nextInt(360);
       baseValue = low + delta * block;
     } else {
       // distributed ~ evenly
-      baseValue = low + (high - low) * randomDouble();
+      baseValue = low + (high - low) * random().nextDouble();
     }
 
     assert baseValue >= low;
     assert baseValue <= high;
 
     // either return the base value or adjust it by 1 ulp in a random direction (if possible)
-    int adjustMe = randomInt(17);
+    int adjustMe = random().nextInt(17);
     if (adjustMe == 0) {
       return Math.nextAfter(adjustMe, high);
     } else if (adjustMe == 1) {
@@ -109,7 +106,7 @@ public class GeoTestUtil {
   private static double nextLatitudeNear(double otherLatitude, double delta) {
     delta = Math.abs(delta);
     GeoUtils.checkLatitude(otherLatitude);
-    int surpriseMe = randomInt(97);
+    int surpriseMe = random().nextInt(97);
     if (surpriseMe == 0) {
       // purely random
       return nextLatitude();
@@ -126,7 +123,7 @@ public class GeoTestUtil {
   private static double nextLongitudeNear(double otherLongitude, double delta) {
     delta = Math.abs(delta);
     GeoUtils.checkLongitude(otherLongitude);
-    int surpriseMe = randomInt(97);
+    int surpriseMe = random().nextInt(97);
     if (surpriseMe == 0) {
       // purely random
       return nextLongitude();
@@ -148,7 +145,7 @@ public class GeoTestUtil {
     assert maxLatitude >= minLatitude;
     GeoUtils.checkLatitude(minLatitude);
     GeoUtils.checkLatitude(maxLatitude);
-    if (randomInt(47) == 0) {
+    if (random().nextInt(47) == 0) {
       // purely random
       return nextLatitude();
     } else {
@@ -169,7 +166,7 @@ public class GeoTestUtil {
     assert maxLongitude >= minLongitude;
     GeoUtils.checkLongitude(minLongitude);
     GeoUtils.checkLongitude(maxLongitude);
-    if (randomInt(47) == 0) {
+    if (random().nextInt(47) == 0) {
       // purely random
       return nextLongitude();
     } else {
@@ -214,7 +211,7 @@ public class GeoTestUtil {
   public static double[] nextPointNear(Rectangle rectangle) {
     if (rectangle.crossesDateline()) {
       // pick a "side" of the two boxes we really are
-      if (randomBoolean()) {
+      if (random().nextBoolean()) {
         return nextPointNear(new Rectangle(rectangle.minLat, rectangle.maxLat, -180, rectangle.maxLon));
       } else {
         return nextPointNear(new Rectangle(rectangle.minLat, rectangle.maxLat, rectangle.minLon, 180));
@@ -232,11 +229,11 @@ public class GeoTestUtil {
     Polygon holes[] = polygon.getHoles();
 
     // if there are any holes, target them aggressively
-    if (holes.length > 0 && randomInt(3) == 0) {
-      return nextPointNear(holes[randomInt(holes.length)]);
+    if (holes.length > 0 && random().nextInt(3) == 0) {
+      return nextPointNear(holes[random().nextInt(holes.length)]);
     }
 
-    int surpriseMe = randomInt(97);
+    int surpriseMe = random().nextInt(97);
     if (surpriseMe == 0) {
       // purely random
       return new double[] { nextLatitude(), nextLongitude() };
@@ -245,7 +242,7 @@ public class GeoTestUtil {
       return new double[] { nextLatitudeBetween(polygon.minLat, polygon.maxLat), nextLongitudeBetween(polygon.minLon, polygon.maxLon) };
     } else if (surpriseMe < 20) {
       // target a vertex
-      int vertex = randomInt(polyLats.length - 1);
+      int vertex = random().nextInt(polyLats.length - 1);
       return new double[] { nextLatitudeNear(polyLats[vertex], polyLats[vertex+1] - polyLats[vertex]), 
                             nextLongitudeNear(polyLons[vertex], polyLons[vertex+1] - polyLons[vertex]) };
     } else if (surpriseMe < 30) {
@@ -253,14 +250,14 @@ public class GeoTestUtil {
       Polygon container = boxPolygon(new Rectangle(polygon.minLat, polygon.maxLat, polygon.minLon, polygon.maxLon));
       double containerLats[] = container.getPolyLats();
       double containerLons[] = container.getPolyLons();
-      int startVertex = randomInt(containerLats.length - 1);
+      int startVertex = random().nextInt(containerLats.length - 1);
       return nextPointAroundLine(containerLats[startVertex], containerLons[startVertex], 
                                  containerLats[startVertex+1], containerLons[startVertex+1]);
     } else {
       // target points around diagonals between vertices
-      int startVertex = randomInt(polyLats.length - 1);
+      int startVertex = random().nextInt(polyLats.length - 1);
       // but favor edges heavily
-      int endVertex = randomBoolean() ? startVertex + 1 : randomInt(polyLats.length - 1);
+      int endVertex = random().nextBoolean() ? startVertex + 1 : random().nextInt(polyLats.length - 1);
       return nextPointAroundLine(polyLats[startVertex], polyLons[startVertex], 
                                  polyLats[endVertex],   polyLons[endVertex]);
     }
@@ -273,11 +270,11 @@ public class GeoTestUtil {
     
     // if there are any holes, target them aggressively
     Polygon holes[] = polygon.getHoles();
-    if (holes.length > 0 && randomInt(3) == 0) {
-      return nextBoxNear(holes[randomInt(holes.length)]);
+    if (holes.length > 0 && random().nextInt(3) == 0) {
+      return nextBoxNear(holes[random().nextInt(holes.length)]);
     }
     
-    int surpriseMe = randomInt(97);
+    int surpriseMe = random().nextInt(97);
     if (surpriseMe == 0) {
       // formed from two interesting points
       point1 = nextPointNear(polygon);
@@ -289,7 +286,7 @@ public class GeoTestUtil {
       // now figure out a good delta: we use a rough heuristic, up to the length of an edge
       double polyLats[] = polygon.getPolyLats();
       double polyLons[] = polygon.getPolyLons();
-      int vertex = randomInt(polyLats.length - 1);
+      int vertex = random().nextInt(polyLats.length - 1);
       double deltaX = polyLons[vertex+1] - polyLons[vertex];
       double deltaY = polyLats[vertex+1] - polyLats[vertex];
       double edgeLength = Math.sqrt(deltaX * deltaX + deltaY * deltaY);
@@ -389,14 +386,14 @@ public class GeoTestUtil {
 
   /** returns next pseudorandom polygon */
   public static Polygon nextPolygon() {
-    if (randomBoolean()) {
+    if (random().nextBoolean()) {
       return surpriseMePolygon();
-    } else if (randomInt(10) == 1) {
+    } else if (random().nextInt(10) == 1) {
       // this poly is slow to create ... only do it 10% of the time:
       while (true) {
-        int gons = randomIntBetween(4, 500);
+        int gons = TestUtil.nextInt(random(), 4, 500);
         // So the poly can cover at most 50% of the earth's surface:
-        double radiusMeters = randomDouble() * GeoUtils.EARTH_MEAN_RADIUS_METERS * Math.PI / 2.0 + 1.0;
+        double radiusMeters = random().nextDouble() * GeoUtils.EARTH_MEAN_RADIUS_METERS * Math.PI / 2.0 + 1.0;
         try {
           return createRegularPolygon(nextLatitude(), nextLongitude(), radiusMeters, gons);
         } catch (IllegalArgumentException iae) {
@@ -406,7 +403,7 @@ public class GeoTestUtil {
     }
 
     Rectangle box = nextBoxInternal(false);
-    if (randomBoolean()) {
+    if (random().nextBoolean()) {
       // box
       return boxPolygon(box);
     } else {
@@ -483,19 +480,19 @@ public class GeoTestUtil {
       //System.out.println("\nPOLY ITER");
       double centerLat = nextLatitude();
       double centerLon = nextLongitude();
-      double radius = 0.1 + 20 * randomDouble();
-      double radiusDelta = randomDouble();
+      double radius = 0.1 + 20 * random().nextDouble();
+      double radiusDelta = random().nextDouble();
 
       ArrayList<Double> lats = new ArrayList<>();
       ArrayList<Double> lons = new ArrayList<>();
       double angle = 0.0;
       while (true) {
-        angle += randomDouble() * 40.0;
+        angle += random().nextDouble()*40.0;
         //System.out.println("  angle " + angle);
         if (angle > 360) {
           break;
         }
-        double len = radius * (1.0 - radiusDelta + radiusDelta * randomDouble());
+        double len = radius * (1.0 - radiusDelta + radiusDelta * random().nextDouble());
         //System.out.println("    len=" + len);
         double lat = centerLat + len * Math.cos(SloppyMath.toRadians(angle));
         double lon = centerLon + len * Math.sin(SloppyMath.toRadians(angle));
@@ -524,6 +521,11 @@ public class GeoTestUtil {
     }
   }
 
+  /** Keep it simple, we don't need to take arbitrary Random for geo tests */
+  private static Random random() {
+   return RandomizedContext.current().getRandom();
+  }
+
   /** 
    * Returns svg of polygon for debugging. 
    * <p>
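
For readers skimming this diff: the change swaps the RandomizedTest static helpers for a private random() that fetches the runner-managed Random on demand. A minimal sketch of the resulting pattern (illustrative only; the class and method names here are mine, not from the commit):

import java.util.Random;

import com.carrotsearch.randomizedtesting.RandomizedContext;

// Sketch, not commit code. It only works on a test thread managed by the
// randomizedtesting runner (e.g. inside a LuceneTestCase subclass), where
// RandomizedContext.current() is defined.
class GeoRandomSketch {
  private static Random random() {
    // the per-test, seed-reproducible Random managed by the runner
    return RandomizedContext.current().getRandom();
  }

  static double nextDoubleBetween(double low, double high) {
    // e.g. the "distributed ~ evenly" branch in the diff above
    return low + (high - low) * random().nextDouble();
  }
}

One subtlety worth noting: RandomizedTest.randomInt(max) draws from 0..max inclusive, whereas Random.nextInt(bound) excludes its bound, so calls like randomInt(17) and random().nextInt(17) do not cover exactly the same range.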


[28/48] lucene-solr:jira/http2: LUCENE-8312: Fixed performance regression with non-scoring term queries.

Posted by da...@apache.org.
LUCENE-8312: Fixed performance regression with non-scoring term queries.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/64573c14
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/64573c14
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/64573c14

Branch: refs/heads/jira/http2
Commit: 64573c142c851741da50f8858c9d630557a151d0
Parents: 18c2300
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 19:13:54 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 21:39:55 2018 +0200

----------------------------------------------------------------------
 lucene/core/src/java/org/apache/lucene/search/TermQuery.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/64573c14/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
index 46901f2..3fa4aa7 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
@@ -114,7 +114,7 @@ public class TermQuery extends Query {
       if (scoreMode == ScoreMode.TOP_SCORES) {
         return new TermScorer(this, termsEnum.impacts(PostingsEnum.FREQS), scorer);
       } else {
-        return new TermScorer(this, termsEnum.postings(null, PostingsEnum.FREQS), scorer);
+        return new TermScorer(this, termsEnum.postings(null, scoreMode.needsScores() ? PostingsEnum.FREQS : PostingsEnum.NONE), scorer);
       }
     }
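
The one-line change is the whole fix: when the collector's ScoreMode does not need scores, TermWeight once again asks the codec for PostingsEnum.NONE, so postings lists are iterated without decoding term frequencies. A small self-contained sketch of a query that exercises this path (illustrative only; the class name and index setup are mine, not from the commit):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class NonScoringTermQueryDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory(); // in-memory index, demo only
    try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      doc.add(new TextField("body", "hello world", Field.Store.NO));
      w.addDocument(doc);
    }
    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      IndexSearcher searcher = new IndexSearcher(reader);
      // TotalHitCountCollector reports ScoreMode.COMPLETE_NO_SCORES, so with
      // this fix TermWeight requests PostingsEnum.NONE instead of decoding
      // term frequencies it would never use.
      TotalHitCountCollector collector = new TotalHitCountCollector();
      searcher.search(new TermQuery(new Term("body", "hello")), collector);
      System.out.println(collector.getTotalHits()); // prints 1
    }
  }
}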
 


[37/48] lucene-solr:jira/http2: SOLR-8207: Add "Nodes" view to the Admin UI "Cloud" tab, listing nodes and key metrics

Posted by da...@apache.org.
SOLR-8207: Add "Nodes" view to the Admin UI "Cloud" tab, listing nodes and key metrics


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/17a02c10
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/17a02c10
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/17a02c10

Branch: refs/heads/jira/http2
Commit: 17a02c1089b80ee358a5dc6692cb443d9b4c9b01
Parents: 1af7686
Author: Jan Høydahl <ja...@apache.org>
Authored: Fri Aug 3 13:55:11 2018 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Fri Aug 3 13:55:11 2018 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../org/apache/solr/core/CoreContainer.java     |   2 +-
 .../solr/handler/admin/AdminHandlersProxy.java  | 128 +++++
 .../solr/handler/admin/MetricsHandler.java      |  13 +-
 .../solr/handler/admin/SystemInfoHandler.java   |  11 +-
 .../handler/admin/AdminHandlersProxyTest.java   | 119 +++++
 .../solr/handler/admin/MetricsHandlerTest.java  |   8 +-
 solr/solr-ref-guide/src/cloud-screens.adoc      |  25 +-
 .../src/images/cloud-screens/cloud-nodes.png    | Bin 0 -> 185805 bytes
 .../apache/solr/common/params/CommonParams.java |   2 +
 solr/webapp/web/css/angular/cloud.css           | 193 ++++++++
 solr/webapp/web/css/angular/menu.css            |   2 +-
 solr/webapp/web/index.html                      |   4 +-
 solr/webapp/web/js/angular/controllers/cloud.js | 484 ++++++++++++++++++-
 solr/webapp/web/js/angular/services.js          |   7 +-
 solr/webapp/web/partials/cloud.html             | 132 +++++
 16 files changed, 1103 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e8ee4f4..fdccfb8 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -123,6 +123,8 @@ New Features
 
 * SOLR-12536: autoscaling policy support to equally distribute replicas on the basis of arbitrary properties (noble)
 
+* SOLR-8207: Add "Nodes" view to the Admin UI "Cloud" tab, listing nodes and key metrics (janhoy)
+
 * SOLR-11990: Make it possible to co-locate replicas of multiple collections together in a node. A collection may be
   co-located with another collection during collection creation time by specifying a 'withCollection' parameter. It can
   also be co-located afterwards by using the modify collection API. The co-location guarantee is enforced regardless of

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 6af7c97..0583101 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -571,7 +571,7 @@ public class CoreContainer {
     configSetsHandler = createHandler(CONFIGSETS_HANDLER_PATH, cfg.getConfigSetsHandlerClass(), ConfigSetsHandler.class);
 
     // metricsHistoryHandler uses metricsHandler, so create it first
-    metricsHandler = new MetricsHandler(metricManager);
+    metricsHandler = new MetricsHandler(this);
     containerHandlers.put(METRICS_PATH, metricsHandler);
     metricsHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, METRICS_PATH);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java b/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java
new file mode 100644
index 0000000..ae3e01f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.cloud.ZkController;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Pair;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Static methods to proxy calls to an Admin (GET) API to other nodes in the cluster and return a combined response
+ */
+public class AdminHandlersProxy {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final String PARAM_NODES = "nodes";
+
+  // Proxy this request to a different remote node if 'node' parameter is provided
+  public static boolean maybeProxyToNodes(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer container)
+      throws IOException, SolrServerException, InterruptedException {
+    String nodeNames = req.getParams().get(PARAM_NODES);
+    if (nodeNames == null || nodeNames.isEmpty()) {
+      return false; // local request
+    }
+
+    if (!container.isZooKeeperAware()) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES + " only supported in Cloud mode");
+    }
+    
+    Set<String> nodes;
+    String pathStr = req.getPath();
+    
+    Map<String,String> paramsMap = req.getParams().toMap(new HashMap<>());
+    paramsMap.remove(PARAM_NODES);
+    SolrParams params = new MapSolrParams(paramsMap);
+    Set<String> liveNodes = container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+    
+    if (nodeNames.equals("all")) {
+      nodes = liveNodes;
+      log.debug("All live nodes requested");
+    } else {
+      nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+      for (String nodeName : nodes) {
+        if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES + " has wrong format");
+        }
+
+        if (!liveNodes.contains(nodeName)) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Requested node " + nodeName + " is not part of cluster");
+        }
+      }       
+      log.debug("Nodes requested: {}", nodes);
+    }
+    log.debug(PARAM_NODES + " parameter {} specified on {} request", nodeNames, pathStr);
+    
+    Map<String, Pair<Future<NamedList<Object>>, SolrClient>> responses = new HashMap<>();
+    for (String node : nodes) {
+      responses.put(node, callRemoteNode(node, pathStr, params, container.getZkController()));
+    }
+    
+    for (Map.Entry<String, Pair<Future<NamedList<Object>>, SolrClient>> entry : responses.entrySet()) {
+      try {
+        NamedList<Object> resp = entry.getValue().first().get(10, TimeUnit.SECONDS);
+        entry.getValue().second().close();
+        rsp.add(entry.getKey(), resp);
+      } catch (ExecutionException ee) {
+        log.warn("Exception when fetching result from node {}", entry.getKey(), ee);
+      } catch (TimeoutException te) {
+        log.warn("Timeout when fetching result from node {}", entry.getKey(), te);
+      }
+    }
+    log.info("Fetched response from {} nodes: {}", responses.keySet().size(), responses.keySet());
+    return true;
+  } 
+
+  /**
+   * Makes a remote request and returns a future and the solr client. The caller is responsible for closing the client 
+   */
+  public static Pair<Future<NamedList<Object>>, SolrClient> callRemoteNode(String nodeName, String endpoint, 
+                                                                           SolrParams params, ZkController zkController) 
+      throws IOException, SolrServerException {
+    log.debug("Proxying {} request to node {}", endpoint, nodeName);
+    URL baseUrl = new URL(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
+    HttpSolrClient solr = new HttpSolrClient.Builder(baseUrl.toString()).build();
+    SolrRequest proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET, endpoint, params);
+    HttpSolrClient.HttpUriRequestResponse proxyResp = solr.httpUriRequest(proxyReq);
+    return new Pair<>(proxyResp.future, solr);
+  }
+}
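
The MetricsHandler and SystemInfoHandler hunks below show how this class is wired in; a hedged sketch of the same integration pattern (not commit code; ProxyAwareHandler and its local body are hypothetical stand-ins):

import org.apache.solr.core.CoreContainer;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;

// Sketch only: a CoreContainer-aware admin handler offers each request to the
// proxy first, and handles it locally only when no 'nodes' parameter is present.
public class ProxyAwareHandler extends RequestHandlerBase {
  private final CoreContainer cc; // supplied by whoever constructs the handler

  public ProxyAwareHandler(CoreContainer cc) {
    this.cc = cc;
  }

  @Override
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    if (cc != null && AdminHandlersProxy.maybeProxyToNodes(req, rsp, cc)) {
      return; // fanned out; per-node responses are already keyed into rsp
    }
    rsp.add("node-local", true); // hypothetical local handling
  }

  @Override
  public String getDescription() {
    return "Example proxy-aware admin handler";
  }
}

Clients trigger the fan-out by adding nodes=all, or a comma-separated list of live node names of the form host:port_context (e.g. host:8983_solr), to an admin GET such as /admin/metrics; each remote response is added to the combined output keyed by its node name.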

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index ca291e8..1f1a820 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -41,6 +41,7 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.core.CoreContainer;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.SolrQueryRequest;
@@ -67,11 +68,17 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
   public static final String ALL = "all";
 
   private static final Pattern KEY_REGEX = Pattern.compile("(?<!" + Pattern.quote("\\") + ")" + Pattern.quote(":"));
+  private CoreContainer cc;
 
   public MetricsHandler() {
     this.metricManager = null;
   }
 
+  public MetricsHandler(CoreContainer coreContainer) {
+    this.metricManager = coreContainer.getMetricManager();
+    this.cc = coreContainer;
+  }
+
   public MetricsHandler(SolrMetricManager metricManager) {
     this.metricManager = metricManager;
   }
@@ -87,9 +94,13 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
       throw new SolrException(SolrException.ErrorCode.INVALID_STATE, "SolrMetricManager instance not initialized");
     }
 
+    if (cc != null && AdminHandlersProxy.maybeProxyToNodes(req, rsp, cc)) {
+      return; // Request was proxied to other node
+    }
+
     handleRequest(req.getParams(), (k, v) -> rsp.add(k, v));
   }
-
+  
   public void handleRequest(SolrParams params, BiConsumer<String, Object> consumer) throws Exception {
     boolean compact = params.getBool(COMPACT_PARAM, true);
     String[] keys = params.getParams(KEY_PARAM);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
index 8a2786c..d8e10ab 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
@@ -61,13 +61,14 @@ import static org.apache.solr.common.params.CommonParams.NAME;
 public class SystemInfoHandler extends RequestHandlerBase 
 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final String PARAM_NODE = "node";
 
   public static String REDACT_STRING = RedactionUtils.getRedactString();
 
   /**
    * <p>
    * Undocumented expert level system property to prevent doing a reverse lookup of our hostname.
-   * This property ill be logged as a suggested workaround if any probems are noticed when doing reverse 
+   * This property will be logged as a suggested workaround if any problems are noticed when doing reverse 
    * lookup.
    * </p>
    *
@@ -130,7 +131,11 @@ public class SystemInfoHandler extends RequestHandlerBase
   @Override
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
   {
+    rsp.setHttpCaching(false);
     SolrCore core = req.getCore();
+    if (AdminHandlersProxy.maybeProxyToNodes(req, rsp, getCoreContainer(req, core))) {
+      return; // Request was proxied to other node
+    }
     if (core != null) rsp.add( "core", getCoreInfo( core, req.getSchema() ) );
     boolean solrCloudMode =  getCoreContainer(req, core).isZooKeeperAware();
     rsp.add( "mode", solrCloudMode ? "solrcloud" : "std");
@@ -142,7 +147,9 @@ public class SystemInfoHandler extends RequestHandlerBase
     rsp.add( "lucene", getLuceneInfo() );
     rsp.add( "jvm", getJvmInfo() );
     rsp.add( "system", getSystemInfo() );
-    rsp.setHttpCaching(false);
+    if (solrCloudMode) {
+      rsp.add("node", getCoreContainer(req, core).getZkController().getNodeName());
+    }
   }
 
   private CoreContainer getCoreContainer(SolrQueryRequest req, SolrCore core) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java b/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java
new file mode 100644
index 0000000..93cbe3e
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.lucene.util.IOUtils;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.response.SimpleSolrResponse;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class AdminHandlersProxyTest extends SolrCloudTestCase {
+  private CloseableHttpClient httpClient;
+  private CloudSolrClient solrClient;
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(2)
+        .addConfig("conf", configset("cloud-minimal"))
+        .configure();
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    solrClient = getCloudSolrClient(cluster);
+    solrClient.connect(1000, TimeUnit.MILLISECONDS);
+    httpClient = (CloseableHttpClient) solrClient.getHttpClient();
+  }
+
+  @After
+  @Override
+  public void tearDown() throws Exception {
+    super.tearDown();
+    IOUtils.close(solrClient, httpClient);
+  }
+
+  @Test
+  public void proxySystemInfoHandlerAllNodes() throws IOException, SolrServerException {
+    MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", "all"));
+    GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params);
+    SimpleSolrResponse rsp = req.process(solrClient, null);
+    NamedList<Object> nl = rsp.getResponse();
+    assertEquals(3, nl.size());
+    assertTrue(nl.getName(1).endsWith("_solr"));
+    assertTrue(nl.getName(2).endsWith("_solr"));
+    assertEquals("solrcloud", ((NamedList)nl.get(nl.getName(1))).get("mode"));
+    assertEquals(nl.getName(2), ((NamedList)nl.get(nl.getName(2))).get("node"));
+  }
+
+  @Test
+  public void proxyMetricsHandlerAllNodes() throws IOException, SolrServerException {
+    MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", "all"));
+    GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params);
+    SimpleSolrResponse rsp = req.process(solrClient, null);
+    NamedList<Object> nl = rsp.getResponse();
+    assertEquals(3, nl.size());
+    assertTrue(nl.getName(1).endsWith("_solr"));
+    assertTrue(nl.getName(2).endsWith("_solr"));
+    assertNotNull(((NamedList)nl.get(nl.getName(1))).get("metrics"));
+  }
+
+  @Test(expected = SolrException.class)
+  public void proxySystemInfoHandlerNonExistingNode() throws IOException, SolrServerException {
+    MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", "example.com:1234_solr"));
+    GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params);
+    SimpleSolrResponse rsp = req.process(solrClient, null);
+  }
+  
+  @Test
+  public void proxySystemInfoHandlerOneNode() {
+    Set<String> nodes = solrClient.getClusterStateProvider().getLiveNodes();
+    assertEquals(2, nodes.size());
+    nodes.forEach(node -> {
+      MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", node));
+      GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params);
+      SimpleSolrResponse rsp = null;
+      try {
+        rsp = req.process(solrClient, null);
+      } catch (Exception e) {
+        fail("Exception while proxying request to node " + node);
+      }
+      NamedList<Object> nl = rsp.getResponse();
+      assertEquals(2, nl.size());
+      assertEquals("solrcloud", ((NamedList)nl.get(nl.getName(1))).get("mode"));
+      assertEquals(nl.getName(1), ((NamedList)nl.get(nl.getName(1))).get("node"));
+    });
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index 392bdfc..0fe5ad7 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -48,7 +48,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
 
   @Test
   public void test() throws Exception {
-    MetricsHandler handler = new MetricsHandler(h.getCoreContainer().getMetricManager());
+    MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
 
     SolrQueryResponse resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json"), resp);
@@ -179,7 +179,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
 
   @Test
   public void testCompact() throws Exception {
-    MetricsHandler handler = new MetricsHandler(h.getCoreContainer().getMetricManager());
+    MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
 
     SolrQueryResponse resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true"), resp);
@@ -197,7 +197,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
   public void testPropertyFilter() throws Exception {
     assertQ(req("*:*"), "//result[@numFound='0']");
 
-    MetricsHandler handler = new MetricsHandler(h.getCoreContainer().getMetricManager());
+    MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
 
     SolrQueryResponse resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
@@ -234,7 +234,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
 
   @Test
   public void testKeyMetrics() throws Exception {
-    MetricsHandler handler = new MetricsHandler(h.getCoreContainer().getMetricManager());
+    MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
 
     String key1 = "solr.core.collection1:CACHE.core.fieldCache";
     SolrQueryResponse resp = new SolrQueryResponse();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/solr-ref-guide/src/cloud-screens.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/cloud-screens.adoc b/solr/solr-ref-guide/src/cloud-screens.adoc
index 77cf9e5..34982ee 100644
--- a/solr/solr-ref-guide/src/cloud-screens.adoc
+++ b/solr/solr-ref-guide/src/cloud-screens.adoc
@@ -26,7 +26,24 @@ This screen provides status information about each collection & node in your clu
 The "Cloud" menu option is only available on Solr instances running in <<getting-started-with-solrcloud.adoc#getting-started-with-solrcloud,SolrCloud mode>>. Single node or master/slave replication instances of Solr will not display this option.
 ====
 
-Click on the Cloud option in the left-hand navigation, and a small sub-menu appears with options called "Tree", "Graph", and "Graph (Radial)". The default view ("Graph") shows a graph of each collection, the shards that make up those collections, and the addresses and type ("NRT", "TLOG" or "PULL") of each replica for each shard.
+Click on the "Cloud" option in the left-hand navigation, and a small sub-menu appears with options called "Nodes", "Tree", "Graph" and "Graph (Radial)". The sub-view selected by default is "Graph".
+
+== Nodes view
+The "Nodes" view shows a list of the hosts and nodes in the cluster along with key information for each: "CPU", "Heap", "Disk usage", "Requests", "Collections" and "Replicas".
+
+The example below shows the default "cloud" example with some documents added to the "gettingstarted" collection. Details are expanded for the node on port 7574, showing additional metadata and metric details. The screen provides links to navigate to nodes, collections, and replicas. The table supports paging and filtering on host/node names and collection names.
+ 
+image::images/cloud-screens/cloud-nodes.png[image,width=900,height=415]
+
+== Tree view
+The "Tree" view shows a directory structure of the data in ZooKeeper, including cluster wide information regarding the `live_nodes` and `overseer` status, as well as collection specific information such as the `state.json`, current shard leaders, and configuration files in use. In this example, we see part of the `state.json`  definition for the "tlog" collection:
+
+image::images/cloud-screens/cloud-tree.png[image,width=487,height=250]
+
+As an aid to debugging, the data shown in the "Tree" view can be exported locally using the following command: `bin/solr zk ls -r /`
+
+== Graph views
+The "Graph" view shows a graph of each collection, the shards that make up those collections, and the addresses and type ("NRT", "TLOG" or "PULL") of each replica for each shard.
 
 This example shows a simple cluster. In addition to the 2 shard, 2 replica "gettingstarted" collection, there is an additional "tlog" collection consisting of mixed TLOG and PULL replica types.
 
@@ -39,9 +56,3 @@ image::images/cloud-screens/cloud-hover.png[image,width=512,height=250]
 The "Graph (Radial)" option provides a different visual view of each node. Using the same example cluster, the radial graph view looks like:
 
 image::images/cloud-screens/cloud-radial.png[image,width=478,height=250]
-
-The "Tree" option shows a directory structure of the data in ZooKeeper, including cluster wide information regarding the `live_nodes` and `overseer` status, as well as collection specific information such as the `state.json`, current shard leaders, and configuration files in use. In this example, we see part of the `state.json`  definition for the "tlog" collection:
-
-image::images/cloud-screens/cloud-tree.png[image,width=487,height=250]
-
-As an aid to debugging, the data shown in the "Tree" view can be exported locally using the following command `bin/solr zk ls -r /`

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/solr-ref-guide/src/images/cloud-screens/cloud-nodes.png
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/images/cloud-screens/cloud-nodes.png b/solr/solr-ref-guide/src/images/cloud-screens/cloud-nodes.png
new file mode 100644
index 0000000..cecc758
Binary files /dev/null and b/solr/solr-ref-guide/src/images/cloud-screens/cloud-nodes.png differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
index 054e9ee..63a763c 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
@@ -182,6 +182,7 @@ public interface CommonParams {
   String AUTHZ_PATH = "/admin/authorization";
   String AUTHC_PATH = "/admin/authentication";
   String ZK_PATH = "/admin/zookeeper";
+  String SYSTEM_INFO_PATH = "/admin/info/system";
   String METRICS_PATH = "/admin/metrics";
   String METRICS_HISTORY_PATH = "/admin/metrics/history";
   String AUTOSCALING_PATH = "/admin/autoscaling";
@@ -199,6 +200,7 @@ public interface CommonParams {
       COLLECTIONS_HANDLER_PATH,
       HEALTH_CHECK_HANDLER_PATH,
       CONFIGSETS_HANDLER_PATH,
+      SYSTEM_INFO_PATH,
       AUTHC_PATH,
       AUTHZ_PATH,
       METRICS_PATH,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/webapp/web/css/angular/cloud.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/angular/cloud.css b/solr/webapp/web/css/angular/cloud.css
index e7a16c1..c3d54a5 100644
--- a/solr/webapp/web/css/angular/cloud.css
+++ b/solr/webapp/web/css/angular/cloud.css
@@ -421,6 +421,12 @@ limitations under the License.
   padding-bottom: 15px;
 }
 
+#nodesPaging
+{
+  padding-top: 5px;
+  padding-bottom: 5px;
+}
+
 #content #cloud #legend .shard-inactive,
 #content #cloud #legend .shard-inactive li,
 #content #cloud #legend .shard-inactive li text,
@@ -449,3 +455,190 @@ limitations under the License.
 {
   fill: #007BA7;
 }
+
+/* Nodes tab */
+#nodes-table {
+  border-collapse: collapse;
+}
+
+#nodes-table td, #nodes-table th {
+  border: 1px solid #ddd;
+  padding: 8px;
+  vertical-align: top;
+}
+#nodes-table th {
+  font-weight: bolder;
+  font-stretch: extra-expanded;
+  background: #F8F8F8;
+}
+#content #cloud #nodes-content #nodes-table
+{
+  border-top: 1px solid #c0c0c0;
+  margin-top: 10px;
+  padding-top: 10px;
+}
+
+#content #cloud #nodes-content .host-name,
+#content #cloud #nodes-content .node-name a
+{
+  font-weight: bold;
+  font-size: larger;
+}
+
+#content #cloud #nodes-content a,
+#content #cloud #nodes-content a:hover,
+#content #cloud #nodes-content a.hover
+{
+  text-decoration: underline;
+  text-decoration-style: dotted;
+  text-decoration-color: #beebff;
+}
+
+#content #cloud #nodes-content a:hover,
+#content #cloud #nodes-content a.hover
+{
+  background-color: #beebff;
+}
+
+#content #cloud #nodes-content .host-spec,
+#content #cloud #nodes-content .node-spec,
+#content #cloud #nodes-content .node-spec a
+{
+  font-style: italic;
+}
+#content #cloud #nodes-content .node-uptime
+{
+  font-weight: bolder;
+  font-size: 20px;
+}
+#content #cloud #nodes-content .node-load,
+#content #cloud #nodes-content .node-cpu,
+#content #cloud #nodes-content .node-heap,
+#content #cloud #nodes-content .node-disk
+{
+  font-weight: bolder;
+  font-size: 20px;
+}
+
+#content #cloud #nodes-content .pct-normal
+{
+  color: darkgreen;
+}
+
+#content #cloud #nodes-content .pct-warn
+{
+  color: orange;
+}
+
+#content #cloud #nodes-content .pct-critical
+{
+  color: red;
+}
+
+/* Styling of reload and details buttons */
+#content #cloud #controls
+{
+  display: block;
+  height: 30px;
+}
+
+#content #cloud .reload
+{
+  background-image: url( ../../img/ico/arrow-circle.png );
+  padding-left: 21px;
+  float: left;
+}
+
+#content #cloud .reload.loader
+{
+  padding-left: 0;
+}
+
+#content #cloud .details-button 
+{
+  background-image: url(../../img/ico/ui-check-box-uncheck.png);
+  background-position: 0 50%;
+  color: #8D8D8D;
+  margin-top: 7px;
+  margin-left: 10px;
+  padding-left: 21px;
+  width: 30px;
+}
+
+#content #cloud .details-button.on
+{
+  background-image: url( ../../img/ico/ui-check-box.png );
+  color: #333;
+}
+
+#content #cloud #nodes-content .more
+{
+  font-style: italic;
+  text-underline: #0000fa;
+}
+
+/* Disk usage details d3 chart bars style */
+.chart {
+    background: #eee;
+    padding: 1px;
+}
+.chart div {
+    width:90%;
+}
+.chart div div {
+    display:inline-block;
+}
+.chart div div.rect {
+    transition: all 0.5s ease-out;
+    -moz-transition: all 0.5s ease-out;
+    -webkit-transition: all 0.5s ease-out;
+    width:0;
+    font: 10px sans-serif;
+    background-color: #4CAF50;
+    text-align: left;
+    padding: 3px;
+    margin: 2px;
+    color: #000000;
+    box-shadow: 1px 1px 1px #666;
+}
+
+#content #nodes-content .leader
+{
+  font-weight: bold;
+}
+
+#content #nodes-content .scroll-height-250
+{
+  max-height: 250px;
+  overflow-scrolling: auto;
+  overflow: auto;
+  /*overflow-y: auto;*/
+}
+
+#content #nodes-content .min-width-150
+{
+  min-width: 150px;
+}
+
+#content #cloud #nodes-content .node-cores
+{
+  min-width: 150px;
+}
+
+#content #nodes-content .core-details
+{
+  padding-left: 21px;
+}
+
+
+
+::-webkit-scrollbar {
+    -webkit-appearance: none;
+    width: 7px;
+}
+
+::-webkit-scrollbar-thumb {
+    border-radius: 4px;
+    background-color: rgba(0,0,0,.5);
+    -webkit-box-shadow: 0 0 1px rgba(255,255,255,.5);
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/webapp/web/css/angular/menu.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/angular/menu.css b/solr/webapp/web/css/angular/menu.css
index 549d737..ba5e0b6 100644
--- a/solr/webapp/web/css/angular/menu.css
+++ b/solr/webapp/web/css/angular/menu.css
@@ -260,9 +260,9 @@ limitations under the License.
 
 #menu #cloud.global p a { background-image: url( ../../img/ico/network-cloud.png ); }
 #menu #cloud.global .tree a { background-image: url( ../../img/ico/folder-tree.png ); }
+#menu #cloud.global .nodes a { background-image: url( ../../img/solr-ico.png ); }
 #menu #cloud.global .graph a { background-image: url( ../../img/ico/molecule.png ); }
 #menu #cloud.global .rgraph a { background-image: url( ../../img/ico/asterisk.png ); }
-#menu #cloud.global .dump a { background-image: url( ../../img/ico/download-cloud.png ); }
 
 .sub-menu .ping.error a
 {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/webapp/web/index.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/index.html b/solr/webapp/web/index.html
index 3b4fb62..256af89 100644
--- a/solr/webapp/web/index.html
+++ b/solr/webapp/web/index.html
@@ -150,10 +150,10 @@ limitations under the License.
 
             <li id="cloud" class="global optional" ng-show="isCloudEnabled" ng-class="{active:showingCloud}"><p><a href="#/~cloud">Cloud</a></p>
               <ul ng-show="showingCloud">
+                <li class="nodes" ng-class="{active:page=='cloud-nodes'}"><a href="#/~cloud?view=nodes">Nodes</a></li>
                 <li class="tree" ng-class="{active:page=='cloud-tree'}"><a href="#/~cloud?view=tree">Tree</a></li>
-                <li class="graph" ng-class="{active:page=='cloud-graph'}"><a href="#/~cloud">Graph</a></li>
+                <li class="graph" ng-class="{active:page=='cloud-graph'}"><a href="#/~cloud?view=graph">Graph</a></li>
                 <li class="rgraph" ng-class="{active:page=='cloud-rgraph'}"><a href="#/~cloud?view=rgraph">Graph (Radial)</a></li>
-                <!--<li class="dump" ng-class="{active:page=='cloud-dump'}"><a ng-click="dumpCloud()">Dump</a></li>-->
               </ul>
             </li>
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/webapp/web/js/angular/controllers/cloud.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/controllers/cloud.js b/solr/webapp/web/js/angular/controllers/cloud.js
index f1af1f7..08eea38 100644
--- a/solr/webapp/web/js/angular/controllers/cloud.js
+++ b/solr/webapp/web/js/angular/controllers/cloud.js
@@ -16,7 +16,7 @@
 */
 
 solrAdminApp.controller('CloudController',
-    function($scope, $location, Zookeeper, Constants) {
+    function($scope, $location, Zookeeper, Constants, Collections, System, Metrics) {
 
         $scope.showDebug = false;
 
@@ -26,22 +26,466 @@ solrAdminApp.controller('CloudController',
 
         $scope.closeDebug = function() {
             $scope.showDebug = false;
-        }
+        };
 
         var view = $location.search().view ? $location.search().view : "graph";
-        if (view == "tree") {
+        if (view === "tree") {
             $scope.resetMenu("cloud-tree", Constants.IS_ROOT_PAGE);
             treeSubController($scope, Zookeeper);
-        } else if (view == "rgraph") {
+        } else if (view === "rgraph") {
             $scope.resetMenu("cloud-rgraph", Constants.IS_ROOT_PAGE);
             graphSubController($scope, Zookeeper, true);
-        } else if (view == "graph") {
+        } else if (view === "graph") {
             $scope.resetMenu("cloud-graph", Constants.IS_ROOT_PAGE);
             graphSubController($scope, Zookeeper, false);
+        } else if (view === "nodes") {
+            $scope.resetMenu("cloud-nodes", Constants.IS_ROOT_PAGE);
+            nodesSubController($scope, Collections, System, Metrics);
         }
     }
 );
 
+function getOrCreateObj(name, object) {
+  if (name in object) {
+    entry = object[name];
+  } else {
+    entry = {};
+    object[name] = entry;
+  }
+  return entry;
+}
+
+function getOrCreateList(name, object) {
+  if (name in object) {
+    entry = object[name];
+  } else {
+    entry = [];
+    object[name] = entry;
+  }
+  return entry;
+}
+
+function ensureInList(string, list) {
+  if (list.indexOf(string) === -1) {
+    list.push(string);
+  }
+}
+
+/* Puts a node name into the hosts structure */
+function ensureNodeInHosts(node_name, hosts) {
+  var hostName = node_name.split(":")[0];
+  var host = getOrCreateObj(hostName, hosts);
+  var hostNodes = getOrCreateList("nodes", host);
+  ensureInList(node_name, hostNodes);
+}
+
+// from http://scratch99.com/web-development/javascript/convert-bytes-to-mb-kb/
+function bytesToSize(bytes) {
+  var sizes = ['b', 'Kb', 'Mb', 'Gb', 'Tb'];
+  if (bytes === 0) return '0b';
+  var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024)));
+  if (i === 0) return bytes + '' + sizes[i];
+  return (bytes / Math.pow(1024, i)).toFixed(1) + '' + sizes[i];
+}
+
+function numDocsHuman(docs) {
+  var sizes = ['', 'k', 'mn', 'bn', 'tn'];
+  if (docs === 0) return '0';
+  var i = parseInt(Math.floor(Math.log(docs) / Math.log(1000)));
+  if (i === 0) return docs + '' + sizes[i];
+  return (docs / Math.pow(1000, i)).toFixed(1) + '' + sizes[i];
+}
+
+/* Returns a style class depending on percentage */
+var styleForPct = function (pct) {
+  if (pct < 60) return "pct-normal";
+  if (pct < 80) return "pct-warn";
+  return "pct-critical"
+};
+
+function isNumeric(n) {
+  return !isNaN(parseFloat(n)) && isFinite(n);
+}
+
+var nodesSubController = function($scope, Collections, System, Metrics) {
+  $scope.pageSize = 10;
+  $scope.showNodes = true;
+  $scope.showTree = false;
+  $scope.showGraph = false;
+  $scope.showData = false;
+  $scope.showAllDetails = false;
+  $scope.showDetails = {};
+  $scope.from = 0;
+  $scope.to = $scope.pageSize - 1;
+  $scope.filterType = "node"; // Pre-initialize dropdown
+
+  $scope.toggleAllDetails = function() {
+    $scope.showAllDetails = !$scope.showAllDetails;
+    for (var node in $scope.nodes) {
+      $scope.showDetails[node] = $scope.showAllDetails;
+    }
+    for (var host in $scope.hosts) {
+      $scope.showDetails[host] = $scope.showAllDetails;
+    }
+  };
+
+  $scope.toggleDetails = function(key) {
+    $scope.showDetails[key] = !$scope.showDetails[key] === true;
+  };
+
+  $scope.toggleHostDetails = function(key) {
+    $scope.showDetails[key] = !$scope.showDetails[key] === true;
+    for (var nodeId in $scope.hosts[key].nodes) {
+      var node = $scope.hosts[key].nodes[nodeId];
+      $scope.showDetails[node] = $scope.showDetails[key];
+    }
+  };
+
+  $scope.nextPage = function() {
+    $scope.from += parseInt($scope.pageSize);
+    $scope.reload();
+  };
+
+  $scope.previousPage = function() {
+    $scope.from = Math.max(0, $scope.from - parseInt($scope.pageSize));
+    $scope.reload();
+  };
+  
+  // Checks if this node is the first (alphabetically) for a given host. Used to decide rowspan in table
+  $scope.isFirstNodeForHost = function(node) {
+    var hostName = node.split(":")[0]; 
+    var nodesInHost = $scope.filteredNodes.filter(function (node) {
+      return node.startsWith(hostName);
+    });
+    return nodesInHost[0] === node;
+  };
+  
+  // Initializes the cluster state, list of nodes, collections etc
+  $scope.initClusterState = function() {
+    var nodes = {};
+    var hosts = {};
+    var live_nodes = [];
+
+    // We build a node-centric view of the cluster state which we can easily consume to render the table
+    Collections.status(function (data) {
+      // Fetch cluster state from collections API and invert to a nodes structure
+      for (var name in data.cluster.collections) {
+        var collection = data.cluster.collections[name];
+        collection.name = name;
+        var shards = collection.shards;
+        collection.shards = [];
+        for (var shardName in shards) {
+          var shard = shards[shardName];
+          shard.name = shardName;
+          shard.collection = collection.name;
+          var replicas = shard.replicas;
+          shard.replicas = [];
+          for (var replicaName in replicas) {
+            var core = replicas[replicaName];
+            core.name = replicaName;
+            core.collection = collection.name;
+            core.shard = shard.name;
+            core.shard_state = shard.state;
+
+            var node_name = core['node_name'];
+            var node = getOrCreateObj(node_name, nodes);
+            var cores = getOrCreateList("cores", node);
+            cores.push(core);
+            node['base_url'] = core.base_url;
+            node['id'] = core.base_url.replace(/[^\w\d]/g, '');
+            node['host'] = node_name.split(":")[0];
+            var collections = getOrCreateList("collections", node);
+            ensureInList(core.collection, collections);
+            ensureNodeInHosts(node_name, hosts);
+          }
+        }
+      }
+
+      live_nodes = data.cluster.live_nodes;
+      for (n in data.cluster.live_nodes) {
+        node = data.cluster.live_nodes[n];
+        if (!(node in nodes)) {
+          var hostName = node.split(":")[0];
+          nodes[node] = {};
+          nodes[node]['host'] = hostName;
+        }
+        ensureNodeInHosts(node, hosts);
+      }
+
+      // Make sure nodes are sorted alphabetically to align with rowspan in table 
+      for (var host in hosts) {
+        hosts[host].nodes.sort();
+      }
+
+      $scope.nodes = nodes;
+      $scope.hosts = hosts;
+      $scope.live_nodes = live_nodes;
+
+      $scope.Math = window.Math;
+      $scope.reload();
+    });
+  };
+
+  $scope.filterInput = function() {
+    $scope.from = 0;
+    $scope.to = $scope.pageSize - 1;
+    $scope.reload();
+  };
+
+  /*
+    Reload will fetch data for the current page of the table and thus refresh numbers.
+    It is also called whenever a filter or paging action is executed 
+   */
+  $scope.reload = function() {
+    var nodes = $scope.nodes;
+    var node_keys = Object.keys(nodes);
+    var hosts = $scope.hosts;
+    var live_nodes = $scope.live_nodes;
+    var hostNames = Object.keys(hosts);
+    hostNames.sort();
+    var pageSize = isNumeric($scope.pageSize) ? $scope.pageSize : 10;
+
+    // Calculate what nodes that will show on this page
+    var nodesToShow = [];
+    var nodesParam;
+    var hostsToShow = [];
+    var filteredNodes;
+    var filteredHosts;
+    var isFiltered = false;
+    switch ($scope.filterType) {
+      case "node":  // Find what nodes match the node filter
+        if ($scope.nodeFilter) {
+          filteredNodes = node_keys.filter(function (node) {
+            return node.indexOf($scope.nodeFilter) !== -1;
+          });
+        }
+        break;
+
+      case "collection": // Find what collections match the collection filter and what nodes that have these collections
+        if ($scope.collectionFilter) {
+          candidateNodes = {};
+          nodesCollections = [];
+          for (var i = 0 ; i < node_keys.length ; i++) {
+            var node_name = node_keys[i];
+            var node = nodes[node_name];
+            nodeColl = {};
+            nodeColl['node'] = node_name;
+            collections = {};
+            node.cores.forEach(function(core) {
+              collections[core.collection] = true;
+            });
+            nodeColl['collections'] = Object.keys(collections);
+            nodesCollections.push(nodeColl);
+          }
+          nodesCollections.forEach(function(nc) {
+            matchingColls = nc['collections'].filter(function (collection) {
+              return collection.indexOf($scope.collectionFilter) !== -1;
+            });
+            if (matchingColls.length > 0) {
+              candidateNodes[nc.node] = true;
+            }
+          });
+          filteredNodes = Object.keys(candidateNodes);
+        }
+        break;
+
+      case "health":
+
+    }
+    
+    if (filteredNodes) {
+      // If filtering is active, calculate what hosts contain the nodes that match the filters
+      isFiltered = true;
+      filteredHosts = filteredNodes.map(function (node) {
+        return node.split(":")[0];
+      }).filter(function (item, index, self) {
+        return self.indexOf(item) === index;
+      });
+    } else {
+      filteredNodes = node_keys;
+      filteredHosts = hostNames;
+    }
+    filteredNodes.sort();
+    filteredHosts.sort();
+    
+    // Find what hosts & nodes (from the filtered set) that should be displayed on current page
+    for (var id = $scope.from ; id < $scope.from + pageSize && filteredHosts[id] ; id++) {
+      var hostName = filteredHosts[id];
+      hostsToShow.push(hostName);
+      if (isFiltered) { // Only show the nodes per host matching active filter
+        nodesToShow = nodesToShow.concat(filteredNodes.filter(function (node) {
+          return node.startsWith(hostName);
+        }));
+      } else {
+        nodesToShow = nodesToShow.concat(hosts[hostName]['nodes']);
+      }
+    }
+    nodesParam = nodesToShow.join(',');
+    $scope.nextEnabled = $scope.from + pageSize < filteredHosts.length;
+    $scope.prevEnabled = $scope.from - pageSize >= 0;
+    nodesToShow.sort();
+    hostsToShow.sort();
+
+    /*
+     Fetch system info for all selected nodes
+     Pick the data we want to display and add it to the node-centric data structure
+      */
+    System.get({"nodes": nodesParam}, function (systemResponse) {
+      for (var node in systemResponse) {
+        if (node in nodes) {
+          var s = systemResponse[node];
+          nodes[node]['system'] = s;
+          var memTotal = s.system.totalPhysicalMemorySize;
+          var memFree = s.system.freePhysicalMemorySize;
+          var memPercentage = Math.floor((memTotal - memFree) / memTotal * 100);
+          nodes[node]['memUsedPct'] = memPercentage;
+          nodes[node]['memUsedPctStyle'] = styleForPct(memPercentage);
+          nodes[node]['memTotal'] = bytesToSize(memTotal);
+          nodes[node]['memFree'] = bytesToSize(memFree);
+          nodes[node]['memUsed'] = bytesToSize(memTotal - memFree);
+
+          var heapTotal = s.jvm.memory.raw.total;
+          var heapFree = s.jvm.memory.raw.free;
+          var heapPercentage = Math.floor((heapTotal - heapFree) / heapTotal * 100);
+          nodes[node]['heapUsed'] = bytesToSize(heapTotal - heapFree);
+          nodes[node]['heapUsedPct'] = heapPercentage;
+          nodes[node]['heapUsedPctStyle'] = styleForPct(heapPercentage);
+          nodes[node]['heapTotal'] = bytesToSize(heapTotal);
+          nodes[node]['heapFree'] = bytesToSize(heapFree);
+
+          var jvmUptime = s.jvm.jmx.upTimeMS / 1000; // Seconds
+          nodes[node]['jvmUptime'] = secondsForHumans(jvmUptime);
+          nodes[node]['jvmUptimeSec'] = jvmUptime;
+
+          nodes[node]['uptime'] = s.system.uptime.replace(/.*up (.*?,.*?),.*/, "$1");
+          nodes[node]['loadAvg'] = Math.round(s.system.systemLoadAverage * 100) / 100;
+          nodes[node]['cpuPct'] = Math.ceil(s.system.processCpuLoad);
+          nodes[node]['cpuPctStyle'] = styleForPct(Math.ceil(s.system.processCpuLoad));
+          nodes[node]['maxFileDescriptorCount'] = s.system.maxFileDescriptorCount;
+          nodes[node]['openFileDescriptorCount'] = s.system.openFileDescriptorCount;
+        }
+      }
+    });
+
+    /*
+     Fetch metrics for all selected nodes. Only pull the metrics that we'll show, to save bandwidth.
+     Pick the data we want to display and add it to the node-centric data structure.
+     */
+    Metrics.get({
+          "nodes": nodesParam,
+          "prefix": "CONTAINER.fs,org.eclipse.jetty.server.handler.DefaultHandler.get-requests,INDEX.sizeInBytes,SEARCHER.searcher.numDocs,SEARCHER.searcher.deletedDocs,SEARCHER.searcher.warmupTime"
+        },
+        function (metricsResponse) {
+          for (var node in metricsResponse) {
+            if (node in nodes) {
+              var m = metricsResponse[node];
+              nodes[node]['metrics'] = m;
+              var diskTotal = m.metrics['solr.node']['CONTAINER.fs.totalSpace'];
+              var diskFree = m.metrics['solr.node']['CONTAINER.fs.usableSpace'];
+              var diskPercentage = Math.floor((diskTotal - diskFree) / diskTotal * 100);
+              nodes[node]['diskUsedPct'] = diskPercentage;
+              nodes[node]['diskUsedPctStyle'] = styleForPct(diskPercentage);
+              nodes[node]['diskTotal'] = bytesToSize(diskTotal);
+              nodes[node]['diskFree'] = bytesToSize(diskFree);
+
+              var r = m.metrics['solr.jetty']['org.eclipse.jetty.server.handler.DefaultHandler.get-requests'];
+              nodes[node]['req'] = r.count;
+              nodes[node]['req1minRate'] = Math.floor(r['1minRate'] * 100) / 100;
+              nodes[node]['req5minRate'] = Math.floor(r['5minRate'] * 100) / 100;
+              nodes[node]['req15minRate'] = Math.floor(r['15minRate'] * 100) / 100;
+              nodes[node]['reqp75_ms'] = Math.floor(r['p75_ms']);
+              nodes[node]['reqp95_ms'] = Math.floor(r['p95_ms']);
+              nodes[node]['reqp99_ms'] = Math.floor(r['p99_ms']);
+
+              var cores = nodes[node]['cores'];
+              var indexSizeTotal = 0;
+              var docsTotal = 0;
+              var graphData = [];
+              if (cores) {
+                for (coreId in cores) {
+                  var core = cores[coreId];
+                  var keyName = "solr.core." + core['core'].replace('_', '.').replace('_', '.');
+                  var nodeMetric = m.metrics[keyName];
+                  var size = nodeMetric['INDEX.sizeInBytes'];
+                  size = (typeof size !== 'undefined') ? size : 0;
+                  core['sizeInBytes'] = size;
+                  core['size'] = bytesToSize(size);
+                  core['label'] = core['core'].replace('_shard', '_s').replace(/_replica_./, 'r');
+                  indexSizeTotal += size;
+                  var numDocs = nodeMetric['SEARCHER.searcher.numDocs'];
+                  numDocs = (typeof numDocs !== 'undefined') ? numDocs : 0;
+                  core['numDocs'] = numDocs;
+                  core['numDocsHuman'] = numDocsHuman(numDocs);
+                  core['avgSizePerDoc'] = bytesToSize(numDocs === 0 ? 0 : size / numDocs);
+                  var deletedDocs = nodeMetric['SEARCHER.searcher.deletedDocs'];
+                  deletedDocs = (typeof deletedDocs !== 'undefined') ? deletedDocs : 0;
+                  core['deletedDocs'] = deletedDocs;
+                  core['deletedDocsHuman'] = numDocsHuman(deletedDocs);
+                  var warmupTime = nodeMetric['SEARCHER.searcher.warmupTime'];
+                  warmupTime = (typeof warmupTime !== 'undefined') ? warmupTime : 0;
+                  core['warmupTime'] = warmupTime;
+                  docsTotal += core['numDocs'];
+                }
+                for (coreId in cores) {
+                  core = cores[coreId];
+                  var graphObj = {};
+                  graphObj['label'] = core['label'];
+                  graphObj['size'] = core['sizeInBytes'];
+                  graphObj['sizeHuman'] = core['size'];
+                  graphObj['pct'] = (core['sizeInBytes'] / indexSizeTotal) * 100;
+                  graphData.push(graphObj);
+                }
+                cores.sort(function (a, b) {
+                  return b.sizeInBytes - a.sizeInBytes
+                });
+              } else {
+                cores = {};
+              }
+              graphData.sort(function (a, b) {
+                return b.size - a.size
+              });
+              nodes[node]['graphData'] = graphData;
+              nodes[node]['numDocs'] = numDocsHuman(docsTotal);
+              nodes[node]['sizeInBytes'] = indexSizeTotal;
+              nodes[node]['size'] = bytesToSize(indexSizeTotal);
+              nodes[node]['sizePerDoc'] = docsTotal === 0 ? '0b' : bytesToSize(indexSizeTotal / docsTotal);
+
+              // Build the d3 powered bar chart
+              $('#chart' + nodes[node]['id']).empty();
+              var chart = d3.select('#chart' + nodes[node]['id']).append('div').attr('class', 'chart');
+
+              // Add one div per bar which will group together both labels and bars
+              var g = chart.selectAll('div')
+                  .data(nodes[node]['graphData']).enter()
+                  .append('div');
+
+              // Add the bars
+              var bars = g.append("div")
+                  .attr("class", "rect")
+                  .text(function (d) {
+                    return d.label + ':\u00A0\u00A0' + d.sizeHuman;
+                  });
+
+              // Execute the transition to show the bars
+              bars.transition()
+                  .ease('elastic')
+                  .style('width', function (d) {
+                    return d.pct + '%';
+                  });
+            }
+          }
+        });
+    $scope.nodes = nodes;
+    $scope.hosts = hosts;
+    $scope.live_nodes = live_nodes;
+    $scope.nodesToShow = nodesToShow;
+    $scope.hostsToShow = hostsToShow;
+    $scope.filteredNodes = filteredNodes;
+    $scope.filteredHosts = filteredHosts;
+  };
+  $scope.initClusterState();
+};
+
 var treeSubController = function($scope, Zookeeper) {
     $scope.showTree = true;
     $scope.showGraph = false;
@@ -78,6 +522,28 @@ var treeSubController = function($scope, Zookeeper) {
     $scope.initTree();
 };
 
+/**
+ * Translates a number of seconds into a human readable format of years, days, hours, and minutes
+ * 
+ * @param  {number} seconds The number of seconds to be processed
+ * @return {string}         The phrase describing the amount of time
+ */
+function secondsForHumans ( seconds ) {
+    var levels = [
+        [Math.floor(seconds / 31536000), 'y'],
+        [Math.floor((seconds % 31536000) / 86400), 'd'],
+        [Math.floor(((seconds % 31536000) % 86400) / 3600), 'h'],
+        [Math.floor((((seconds % 31536000) % 86400) % 3600) / 60), 'm']
+    ];
+    var returntext = '';
+
+    for (var i = 0, max = levels.length; i < max; i++) {
+        if ( levels[i][0] === 0 ) continue;
+        returntext += ' ' + levels[i][0] + levels[i][1];
+    }
+    return returntext.trim() === '' ? '0m' : returntext.trim();
+}
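+// sanity examples (illustrative only): secondsForHumans(90061) returns "1d 1h 1m";
+// anything under one minute collapses to the "0m" fallback.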
+
 var graphSubController = function ($scope, Zookeeper, isRadial) {
     $scope.showTree = false;
     $scope.showGraph = true;
@@ -101,17 +567,17 @@ var graphSubController = function ($scope, Zookeeper, isRadial) {
     $scope.next = function() {
         $scope.pos += $scope.rows;
         $scope.initGraph();
-    }
+    };
 
     $scope.previous = function() {
         $scope.pos = Math.max(0, $scope.pos - $scope.rows);
         $scope.initGraph();
-    }
+    };
 
     $scope.resetGraph = function() {
         $scope.pos = 0;
         $scope.initGraph();
-    }
+    };
 
     $scope.initGraph = function() {
         Zookeeper.liveNodes(function (data) {
@@ -514,4 +980,4 @@ solrAdminApp.directive('graph', function(Constants) {
             }
         }
     };
-})
+});

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/webapp/web/js/angular/services.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/services.js b/solr/webapp/web/js/angular/services.js
index 4d89347..66f2654 100644
--- a/solr/webapp/web/js/angular/services.js
+++ b/solr/webapp/web/js/angular/services.js
@@ -19,8 +19,12 @@ var solrAdminServices = angular.module('solrAdminServices', ['ngResource']);
 
 solrAdminServices.factory('System',
   ['$resource', function($resource) {
-    return $resource('admin/info/system', {"wt":"json", "_":Date.now()});
+    return $resource('admin/info/system', {"wt":"json", "nodes": "@nodes", "_":Date.now()});
   }])
+.factory('Metrics',
+    ['$resource', function($resource) {
+      return $resource('admin/metrics', {"wt":"json", "nodes": "@nodes", "prefix":"@prefix", "_":Date.now()});
+    }])
 .factory('Collections',
   ['$resource', function($resource) {
     return $resource('admin/collections',
@@ -63,7 +67,6 @@ solrAdminServices.factory('System',
   ['$resource', function($resource) {
     return $resource('admin/zookeeper', {wt:'json', _:Date.now()}, {
       "simple": {},
-      "dump": {params: {dump: "true"}},
       "liveNodes": {params: {path: '/live_nodes'}},
       "clusterState": {params: {detail: "true", path: "/clusterstate.json"}},
       "detail": {params: {detail: "true", path: "@path"}},

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17a02c10/solr/webapp/web/partials/cloud.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/cloud.html b/solr/webapp/web/partials/cloud.html
index c13ccc6..078c9af 100644
--- a/solr/webapp/web/partials/cloud.html
+++ b/solr/webapp/web/partials/cloud.html
@@ -48,6 +48,138 @@ limitations under the License.
 
     </div>
 
+    <div id="nodes-content" class="content clearfix" ng-show="showNodes">
+      <div id="controls">
+        <a class="reload" ng-click="initClusterState()"><span>Refresh</span></a>
+        <a class="details-button" ng-click="toggleAllDetails()" ng-class="{on:showAllDetails}">
+          <span>Show all details</span>
+        </a>
+      </div>
+      <div>
+        <div id="nodesPaging">
+         <button ng-show="prevEnabled" ng-click="previousPage()" id="nodesPagingPrev">&lt; Previous</button>
+         <span ng-show="filteredHosts.length==0">No hosts found.</span>
+         <span ng-hide="filteredHosts.length==0">Hosts {{from+1}} - {{from + hostsToShow.length}} of {{filteredHosts.length}}.</span>&nbsp;
+
+         Filter by:&nbsp;<select ng-model="filterType" ng-change="filterInput()" id="nodesPagingFilterType">
+           <option value="node">Host/node name</option>
+           <option value="collection">Collection name</option>
+         </select>&nbsp;
+
+         <span ng-show="filterType=='node'">
+           <input ng-model="nodeFilter" type="text" size="10" name="nodefilter" ng-change="filterInput()" ng-model-options='{ debounce: 500 }' />&nbsp;
+         </span>
+          <span ng-show="filterType=='collection'">
+            <input ng-model="collectionFilter" type="text" size="10" name="collectionfilter" ng-change="filterInput()" ng-model-options='{ debounce: 500 }'/>&nbsp;
+          </span>
+         Show <input ng-model="pageSize" ng-change="filterInput()" type="text" size="2" name="rows" ng-model-options='{ debounce: 500 }'/> hosts per page.
+         <button ng-show="nextEnabled" ng-click="nextPage()">Next &gt;</button>
+        </div>
+      </div>
+      <table id="nodes-table">
+        <thead>
+          <tr>
+              <th>Host</th>
+              <th>Node</th>
+              <th>CPU</th>
+              <th>Heap</th>
+              <th>Disk usage</th>
+              <th>Requests</th>
+              <th>Collections</th>
+              <th>Replicas</th>
+          </tr>
+        </thead>
+        <tbody>
+          <tr ng-repeat="key in nodesToShow | orderBy:'key.order'" ng-init="n = nodes[key]">
+              <td rowspan="{{hosts[n.host].nodes.length}}" ng-show="isFirstNodeForHost(key)">
+                <div class="host-name">{{n.host}}</div>
+                <span class="host-spec" ng-show="!showDetails[n.host]">
+                  <span title="{{n.system.system.uptime}}">{{n.system.system.name}}</span> 
+                  <span title="free: {{n.memFree}}">{{n.memTotal}}</span>
+                  <span title="{{n.system.jvm.name}} {{n.system.jvm.version}}">Java {{n.system.jvm.spec.version}}</span>
+                  <br/>Load: {{n.loadAvg}}
+                </span>
+                <div class="host-spec" ng-show="showDetails[n.host]">
+                  {{n.system.system.name}} {{n.system.system.version}}, {{n.system.system.availableProcessors}}cpu<br/>
+                  Uptime: {{n.uptime}}<br/>
+                  <span title="Used: {{n.memUsed}} - includes OS file-cache, and it is normal for it to approach 100%">Memory: {{n.memTotal}}</span><br/> 
+                  File descriptors: {{n.openFileDescriptorCount}}/{{n.maxFileDescriptorCount}}<br/>
+                  Disk: <span class="{{n.diskUsedPctStyle}}" title="Nodes may use other disks too">{{n.diskTotal}} used: {{n.diskUsedPct}}%</span><br/>
+                  Load: {{n.loadAvg}}
+                </div>
+                <div class="node-spec" ng-click="toggleHostDetails(n.host)">
+                  <a ng-show="showDetails[n.host]">hide details...</a>
+                  <a ng-show="!showDetails[n.host]">show details...</a>
+                </div>
+              </td>
+              <td><div class="node-name"><a href="{{n.base_url}}">{{key.replace(n.host+':', '')}}</a></div>
+                Uptime: {{n.jvmUptime}}<br/>
+                <div class="node-spec" ng-show="showDetails[key]">
+                  Java <span title="{{n.system.jvm.jre.vendor}}">{{n.system.jvm.jre.version}}</span><br/>
+                  Solr {{n.system.lucene['solr-impl-version'].split(" ")[0]}}<br>
+                </div>
+                <div class="node-spec" ng-click="toggleDetails(key)">
+                  <a ng-show="showDetails[key]">hide details...</a>
+                  <a ng-show="!showDetails[key]">show details...</a>
+                </div>
+              </td>
+              <td>
+                <div class="node-cpu">
+                  <span class="{{n.cpuPctStyle}}">{{n.cpuPct}}%</span>
+                </div>
+              </td>
+              <td>
+                <div class="node-heap" title="total: {{n.heapTotal}} free: {{n.heapFree}} used%: {{n.heapUsedPct}}%">
+                  <span class="{{n.heapUsedPctStyle}}">{{n.heapUsedPct}}%</span>
+                </div>
+                <div class="node-spec" ng-show="showDetails[key]">
+                  Max: {{n.heapTotal}}<br/>
+                  Used: {{n.heapUsed}}
+                </div>
+              </td>
+              <td class="scroll-height-250">
+                <div>
+                  <div class="node-disk" title="Available disk: {{n.diskTotal}} free: {{n.diskFree}} used by this node: {{n.size}}">
+                    {{n.size}}
+                  </div>
+                  <div class="node-spec" ng-show="showDetails[key]">
+                    Total #docs: {{n.numDocs}}<br/>
+                    Avg size/doc: {{n.sizePerDoc}}
+                  </div>
+                  <div id="chart{{n.id}}" ng-show="showDetails[key]"></div>
+                </div>
+              </td>
+              <td><div class="node-requests" title="1minRate: {{n.req1minRate}} 5minRate: {{n.req5minRate}} 15minRate: {{n.req15minRate}} p75: {{n.reqp75_ms}} p99: {{n.reqp99_ms}}">
+                RPM: {{n.req15minRate}}<br/>p95: {{n.reqp95_ms}}ms</div>
+              </td>
+              <td>
+                <div ng-show="!n.collections">(none)</div>
+                <div ng-repeat="c in n.collections | limitTo:showDetails[key]?999:2 track by $index">
+                  <a href="{{n.base_url + '/#/~collections/' + c}}">{{ c }}</a>
+                </div>
+                <div class="more" ng-show="n.collections.length > 2 && !showDetails[key]">
+                  <a ng-click="toggleDetails(key)">({{n.collections.length - 2}} more...)</a>
+                </div>
+              </td>
+              <td class="scroll-height-250">
+                  <div ng-show="!n.cores">(none)</div>
+                  <div ng-repeat="core in n.cores | limitTo:showDetails[key]?999:2 track by $index">
+                    <a class="{{core.leader ? 'leader' : 'replica'}}" href="{{core.base_url + '/#/' + core.core}}">{{ core.label }}</a> ({{core.numDocsHuman}} docs)
+                    <ul class="core-details" ng-show="showDetails[key]">
+                      <li>deleted: {{core.deletedDocsHuman}}</li>
+                      <li>warmupTime: {{core.warmupTime}}</li>
+                      <li ng-show="core.numDocs > 0">avg size/doc: {{core.avgSizePerDoc}}</li>
+                    </ul>
+                  </div>
+                  <div class="more" ng-show="n.cores.length > 2 && !showDetails[key]">
+                    <a ng-click="toggleDetails(key)">({{n.cores.length - 2}} more...)</a>
+                  </div>
+              </td>
+          </tr>
+        </tbody>
+      </table>
+    </div>
+    
     <div graph data="graphData" leaf-count="leafCount" helper-data="helperData" is-radial="isRadial" id="graph-content" class="content clearfix" ng-show="showGraph">
 
       <div id="canvas"></div>


[32/48] lucene-solr:jira/http2: SOLR-12594: MetricsHistoryHandler.getOverseerLeader fails when hostname contains hyphen.

Posted by da...@apache.org.
SOLR-12594: MetricsHistoryHandler.getOverseerLeader fails when hostname contains hyphen.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d1173b8a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d1173b8a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d1173b8a

Branch: refs/heads/jira/http2
Commit: d1173b8adc2aaf88582c84e964e2c35c783e0ca8
Parents: 5de10c7
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Aug 2 18:47:58 2018 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Aug 2 18:47:58 2018 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                         |  2 ++
 .../apache/solr/handler/admin/MetricsHistoryHandler.java | 11 +++++++----
 .../solr/metrics/reporters/solr/SolrClusterReporter.java | 11 +++++++----
 .../apache/solr/metrics/rrd/SolrRrdBackendFactory.java   |  9 +++++++--
 .../solr/handler/admin/MetricsHistoryHandlerTest.java    |  4 +---
 5 files changed, 24 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1173b8a/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1b45436..e8ee4f4 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -198,6 +198,8 @@ Bug Fixes
 
 * SOLR-12344: SolrSlf4jReporter doesn't set MDC context. (ab)
 
+* SOLR-12594: MetricsHistoryHandler.getOverseerLeader fails when hostname contains hyphen. (ab)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1173b8a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
index 419f021..9a46d04 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
@@ -62,6 +62,7 @@ import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
+import org.apache.solr.cloud.LeaderElector;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
@@ -332,12 +333,14 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
     if (oid == null) {
       return null;
     }
-    String[] ids = oid.split("-");
-    if (ids.length != 3) { // unknown format
-      log.warn("Unknown format of leader id, skipping: " + oid);
+    String nodeName = null;
+    try {
+      nodeName = LeaderElector.getNodeName(oid);
+    } catch (Exception e) {
+      log.warn("Unknown format of leader id, skipping: " + oid, e);
       return null;
     }
-    return ids[1];
+    return nodeName;
   }
 
   private boolean amIOverseerLeader() {
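
The root cause is easy to sketch: overseer election ids have the (assumed) form
sessionId-nodeName-n_sequence, so splitting on every hyphen miscounts parts as soon
as the node name itself contains one. A minimal illustration (the id below is invented):

    String oid = "72057594037927936-solr-host-1.example.com:8983_solr-n_0000000000";
    String[] ids = oid.split("-");
    // ids.length == 5 here, not 3, so the old "ids.length != 3" check logged
    // "Unknown format of leader id" and returned null for a valid id;
    // LeaderElector.getNodeName(oid) parses the id structurally instead.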

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1173b8a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
index 9e8861e..17390e1 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -28,6 +28,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 
 import org.apache.http.client.HttpClient;
+import org.apache.solr.cloud.LeaderElector;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -270,13 +271,15 @@ public class SolrClusterReporter extends SolrCoreContainerReporter {
       if (oid == null) {
         return lastKnownUrl;
       }
-      String[] ids = oid.split("-");
-      if (ids.length != 3) { // unknown format
-        log.warn("Unknown format of leader id, skipping: " + oid);
+      String nodeName = null;
+      try {
+        nodeName = LeaderElector.getNodeName(oid);
+      } catch (Exception e) {
+        log.warn("Unknown format of leader id, skipping: " + oid, e);
         return lastKnownUrl;
       }
       // convert nodeName back to URL
-      String url = zk.getZkStateReader().getBaseUrlForNodeName(ids[1]);
+      String url = zk.getZkStateReader().getBaseUrlForNodeName(nodeName);
       // check that it's parseable
       try {
         new java.net.URL(url);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1173b8a/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java b/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java
index a3c6f64..5448d8f 100644
--- a/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java
+++ b/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java
@@ -206,7 +206,8 @@ public class SolrRrdBackendFactory extends RrdBackendFactory implements SolrClos
         return null;
       }
       if (o instanceof byte[]) {
-        Long time = (Long)doc.getFieldValue("timestamp_l");
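+        // the stored timestamp may deserialize as any Number type (or even a String),
+        // so coerce defensively instead of blindly casting to Long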
+        Object timeObj = doc.getFieldValue("timestamp_l");
+        Long time = timeObj instanceof Number ? ((Number)timeObj).longValue() : Long.parseLong(String.valueOf(timeObj));
         return new SolrRrdBackend.SyncData((byte[])o, time);
       } else {
         throw new SolrServerException("Unexpected value of '" + DATA_FIELD + "' field: " + o.getClass().getName() + ": " + o);
@@ -248,7 +249,11 @@ public class SolrRrdBackendFactory extends RrdBackendFactory implements SolrClos
         SolrDocumentList docs = rsp.getResults();
         if (docs != null) {
           docs.forEach(d -> {
-            Long time = (Long)d.getFieldValue("timestamp_l");
+            Object o = d.getFieldValue("timestamp_l");
+            if (o == null) {
+              return;
+            }
+            Long time = o instanceof Number ? ((Number)o).longValue() : Long.parseLong(String.valueOf(o));
             Pair<String, Long> p = new Pair<>(((String)d.getFieldValue("id")).substring(idPrefixLength), time);
             byName.put(p.first(), p);
           });

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1173b8a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
index dfdecc2..735b427 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
@@ -21,7 +21,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
@@ -45,7 +44,6 @@ import org.rrd4j.core.RrdDb;
  *
  */
 @LogLevel("org.apache.solr.cloud=DEBUG")
-@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
 public class MetricsHistoryHandlerTest extends SolrCloudTestCase {
 
   private static SolrCloudManager cloudManager;
@@ -60,7 +58,7 @@ public class MetricsHistoryHandlerTest extends SolrCloudTestCase {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
-    simulated = random().nextBoolean() || true;
+    simulated = random().nextBoolean();
     Map<String, Object> args = new HashMap<>();
     args.put(MetricsHistoryHandler.SYNC_PERIOD_PROP, 1);
     args.put(MetricsHistoryHandler.COLLECT_PERIOD_PROP, 1);


[15/48] lucene-solr:jira/http2: SOLR-12412: Fix precommit

Posted by da...@apache.org.
SOLR-12412: Fix precommit


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0dc124aa
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0dc124aa
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0dc124aa

Branch: refs/heads/jira/http2
Commit: 0dc124aa78e2a1c121a9634e69f84c8b1f6be331
Parents: eada799
Author: Cao Manh Dat <da...@apache.org>
Authored: Wed Aug 1 07:14:41 2018 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Wed Aug 1 07:14:41 2018 +0700

----------------------------------------------------------------------
 solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java | 1 -
 1 file changed, 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0dc124aa/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
index 83bd3c3..604ec45 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java
@@ -38,7 +38,6 @@ import org.apache.solr.common.cloud.ClusterStateUtil;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.MockDirectoryFactory;


[43/48] lucene-solr:jira/http2: SOLR-12622: Ref guide example for configuring SolrSlf4jReporter

Posted by da...@apache.org.
SOLR-12622: Ref guide example for configuring SolrSlf4jReporter


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b33df4ec
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b33df4ec
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b33df4ec

Branch: refs/heads/jira/http2
Commit: b33df4ecff2c138dd248b945833e2e59e4aa0424
Parents: 664187f
Author: Varun Thacker <va...@apache.org>
Authored: Sat Aug 4 11:13:14 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Sat Aug 4 11:13:22 2018 -0700

----------------------------------------------------------------------
 solr/solr-ref-guide/src/metrics-reporting.adoc | 54 ++++++++++++++++++---
 1 file changed, 47 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b33df4ec/solr/solr-ref-guide/src/metrics-reporting.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/metrics-reporting.adoc b/solr/solr-ref-guide/src/metrics-reporting.adoc
index 631b5a1..ede9764 100644
--- a/solr/solr-ref-guide/src/metrics-reporting.adoc
+++ b/solr/solr-ref-guide/src/metrics-reporting.adoc
@@ -161,13 +161,19 @@ Reporter configurations are specified in `solr.xml` file in `<metrics><reporter>
     <int name="port">9999</int>
     <int name="period">60</int>
   </reporter>
-  <reporter name="collection1Updates" registry="solr.core.collection1" class="org.apache.solr.metrics.reporters.SolrSlf4jReporter">
-    <int name="period">300</int>
-    <str name="prefix">example</str>
-    <str name="logger">updatesLogger</str>
-    <str name="filter">QUERYHANDLER./update</str>
-  </reporter>
- </metrics>
+  <metrics>
+    <reporter name="log_metrics" group="core" class="org.apache.solr.metrics.reporters.SolrSlf4jReporter">
+      <int name="period">60</int>
+      <str name="filter">QUERY./select.requestTimes</str>
+      <str name="filter">QUERY./get.requestTimes</str>
+      <str name="filter">UPDATE./update.requestTimes</str>
+      <str name="filter">UPDATE./update.clientErrors</str>
+      <str name="filter">UPDATE./update.errors</str>
+      <str name="filter">SEARCHER.new.time</str>
+      <str name="filter">SEARCHER.new.warmup</str>
+      <str name="logger">org.apache.solr.metrics.reporters.SolrSlf4jReporter</str>
+    </reporter>
+  </metrics>
 ...
 </solr>
 ----
@@ -247,6 +253,40 @@ It takes the following arguments, in addition to the common arguments <<Reporter
 The name of the logger to use. Default is empty, in which case the group (or the initial part of the registry name that identifies a metrics group) will be used if specified in the plugin configuration.
 
 Users can specify logger name (and the corresponding logger configuration in e.g., Log4j configuration) to output metrics-related logging to separate file(s), which can then be processed by external applications.
+Here is an example of configuring the default `log4j2.xml` that ships with Solr. It can be used in conjunction with the `solr.xml` example provided earlier on this page to configure the SolrSlf4jReporter:
+
+[source,text]
+----
+<Configuration>
+  <Appenders>
+  ...
+    <RollingFile
+        name="MetricsFile"
+        fileName="${sys:solr.log.dir}/solr_metrics.log"
+        filePattern="${sys:solr.log.dir}/solr_metrics.log.%i" >
+      <PatternLayout>
+        <Pattern>
+          %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core}] %m%n
+        </Pattern>
+      </PatternLayout>
+      <Policies>
+        <OnStartupTriggeringPolicy />
+        <SizeBasedTriggeringPolicy size="32 MB"/>
+      </Policies>
+      <DefaultRolloverStrategy max="10"/>
+    </RollingFile>
+  ...
+  </Appenders>
+
+  <Loggers>
+  ...
+    <Logger name="org.apache.solr.metrics.reporters.SolrSlf4jReporter" level="info" additivity="false">
+      <AppenderRef ref="MetricsFile"/>
+    </Logger>
+  ...
+  </Loggers>
+</Configuration>
+----
 
 Each log line produced by this reporter consists of configuration-specific fields, and a message that follows this format:
 


[27/48] lucene-solr:jira/http2: LUCENE-8435: Add new LatLonShapePolygonQuery for querying indexed LatLonShape fields by arbitrary polygons

Posted by da...@apache.org.
LUCENE-8435: Add new LatLonShapePolygonQuery for querying indexed LatLonShape fields by arbitrary polygons


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/18c2300f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/18c2300f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/18c2300f

Branch: refs/heads/jira/http2
Commit: 18c2300fd61c369b87ce01b6201b95a53f89e115
Parents: 679b4aa
Author: Nicholas Knize <nk...@gmail.com>
Authored: Sat Jul 28 12:55:35 2018 -0500
Committer: Nicholas Knize <nk...@gmail.com>
Committed: Wed Aug 1 12:53:36 2018 -0500

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   2 +
 .../src/java/org/apache/lucene/geo/Polygon.java |  30 ++
 .../java/org/apache/lucene/geo/Polygon2D.java   | 147 ++++++-
 .../org/apache/lucene/geo/TestPolygon2D.java    |  43 ++
 .../org/apache/lucene/document/LatLonShape.java |   4 +
 .../document/LatLonShapePolygonQuery.java       | 271 +++++++++++++
 .../document/TestLatLonPolygonShapeQueries.java | 393 +++++++++++++++++++
 .../lucene/document/TestLatLonShapeQueries.java | 276 -------------
 8 files changed, 886 insertions(+), 280 deletions(-)
----------------------------------------------------------------------
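
As a quick illustration of the new API, a minimal indexing-and-query sketch. The field name
"shape", the open IndexWriter "writer", and all coordinates are invented for illustration;
polygons must be closed (first and last vertex identical):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.LatLonShape;
    import org.apache.lucene.geo.Polygon;
    import org.apache.lucene.search.Query;

    // index one polygon; LatLonShape tessellates it into triangle fields
    Polygon indexedPoly = new Polygon(new double[] {30.0, 30.0, 31.0, 30.0},
                                      new double[] {-91.0, -90.0, -90.0, -91.0});
    Document doc = new Document();
    for (Field f : LatLonShape.createIndexableFields("shape", indexedPoly)) {
      doc.add(f);
    }
    writer.addDocument(doc); // "writer" is an assumed open IndexWriter

    // search with an arbitrary polygon; hits are documents whose shape intersects it
    Polygon searchPoly = new Polygon(new double[] {29.0, 29.0, 32.0, 29.0},
                                     new double[] {-92.0, -89.0, -89.0, -92.0});
    Query query = LatLonShape.newPolygonQuery("shape", searchPoly);

Matching documents score as a constant, per the ConstantScoreWeight in the query implementation below.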


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index b76cc6f..9b9bcc8 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -213,6 +213,8 @@ Changes in Runtime Behavior:
 
 Improvements
 
+* LUCENE-8435: Add new LatLonShapePolygonQuery for querying indexed LatLonShape fields by arbitrary polygons (Nick Knize)
+
 * LUCENE-8367: Make per-dimension drill down optional for each facet dimension (Mike McCandless)
 
 * LUCENE-8396: Add Points Based Shape Indexing and Search that decomposes shapes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Polygon.java b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
index 39ba9b7..5e14286 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
@@ -202,6 +202,36 @@ public final class Polygon {
     return sb.toString();
   }
 
+  private String verticesToGeoJSON(final double[] lats, final double[] lons) {
+    StringBuilder sb = new StringBuilder();
+    sb.append('[');
+    for (int i = 0; i < lats.length; i++) {
+      sb.append("[")
+          .append(lons[i])
+          .append(", ")
+          .append(lats[i])
+          .append("]");
+      if (i != lats.length - 1) {
+        sb.append(", ");
+      }
+    }
+    sb.append(']');
+    return sb.toString();
+  }
+
+  /** prints polygons as geojson */
+  public String toGeoJSON() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("[");
+    sb.append(verticesToGeoJSON(polyLats, polyLons));
+    for (Polygon hole : holes) {
+      sb.append(",");
+      sb.append(verticesToGeoJSON(hole.polyLats, hole.polyLons));
+    }
+    sb.append("]");
+    return sb.toString();
+  }
+
   /** Parses a standard GeoJSON polygon string.  The type of the incoming GeoJSON object must be a Polygon or MultiPolygon, optionally
    *  embedded under a "type: Feature".  A Polygon will return as a length 1 array, while a MultiPolygon will be 1 or more in length.
    *

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java b/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
index 3feb012..64a3784 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
@@ -19,7 +19,6 @@ package org.apache.lucene.geo;
 import java.util.Arrays;
 import java.util.Comparator;
 
-import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.util.ArrayUtil;
 
@@ -123,7 +122,35 @@ public final class Polygon2D {
     
     return false;
   }
-  
+
+  /** Returns relation to the provided triangle */
+  public Relation relateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+    // compute bounding box of triangle
+    double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
+    double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
+    double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
+    double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
+    if (minLat <= maxY && minLon <= maxX) {
+      Relation relation = componentRelateTriangle(ax, ay, bx, by, cx, cy);
+      if (relation != Relation.CELL_OUTSIDE_QUERY) {
+        return relation;
+      }
+      if (left != null) {
+        relation = left.relateTriangle(ax, ay, bx, by, cx, cy);
+        if (relation != Relation.CELL_OUTSIDE_QUERY) {
+          return relation;
+        }
+      }
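+      // descend into the right subtree only when the triangle's bounding box can
+      // reach the right child's range on the split dimension (lat vs. lon)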
+      if (right != null && ((splitX == false && maxLat >= this.minLat) || (splitX && maxLon >= this.minLon))) {
+        relation = right.relateTriangle(ax, ay, bx, by, cx, cy);
+        if (relation != Relation.CELL_OUTSIDE_QUERY) {
+          return relation;
+        }
+      }
+    }
+    return Relation.CELL_OUTSIDE_QUERY;
+  }
+
   /** Returns relation to the provided rectangle */
   public Relation relate(double minLat, double maxLat, double minLon, double maxLon) {
     if (minLat <= maxY && minLon <= maxX) {
@@ -147,6 +174,42 @@ public final class Polygon2D {
     return Relation.CELL_OUTSIDE_QUERY;
   }
 
+  private Relation componentRelateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+    // compute bounding box of triangle
+    double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
+    double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
+    double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
+    double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
+    if (maxLon < this.minLon || minLon > this.maxLon || maxLat < this.minLat || minLat > this.maxLat) {
+      return Relation.CELL_OUTSIDE_QUERY;
+    }
+    // check any holes
+    if (holes != null) {
+      Relation holeRelation = holes.relateTriangle(ax, ay, bx, by, cx, cy);
+      if (holeRelation == Relation.CELL_CROSSES_QUERY) {
+        return Relation.CELL_CROSSES_QUERY;
+      } else if (holeRelation == Relation.CELL_INSIDE_QUERY) {
+        return Relation.CELL_OUTSIDE_QUERY;
+      }
+    }
+    // check each corner: if fewer than 3 are present, it's cheaper than crossesSlowly
+    int numCorners = numberOfTriangleCorners(ax, ay, bx, by, cx, cy);
+    if (numCorners == 3) {
+      if (tree.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+        return Relation.CELL_CROSSES_QUERY;
+      }
+      return Relation.CELL_INSIDE_QUERY;
+    } else if (numCorners > 0) {
+      return Relation.CELL_CROSSES_QUERY;
+    }
+
+    // we cross
+    if (tree.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+      return Relation.CELL_CROSSES_QUERY;
+    }
+    return Relation.CELL_OUTSIDE_QUERY;
+  }
+
   /** Returns relation to the provided rectangle for this component */
   private Relation componentRelate(double minLat, double maxLat, double minLon, double maxLon) {
     // if the bounding boxes are disjoint then the shape does not cross
@@ -184,7 +247,24 @@ public final class Polygon2D {
     
     return Relation.CELL_OUTSIDE_QUERY;
   }
-  
+
+  private int numberOfTriangleCorners(double ax, double ay, double bx, double by, double cx, double cy) {
+    int containsCount = 0;
+    if (componentContains(ay, ax)) {
+      containsCount++;
+    }
+    if (componentContains(by, bx)) {
+      containsCount++;
+    }
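+    // if exactly one of the first two corners is inside, the total can only be
+    // 1 or 2; the caller treats both as a crossing, so skip the third check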
+    if (containsCount == 1) {
+      return containsCount;
+    }
+    if (componentContains(cy, cx)) {
+      containsCount++;
+    }
+    return containsCount;
+  }
+
   // returns 0, 4, or something in between
   private int numberOfCorners(double minLat, double maxLat, double minLon, double maxLon) {
     int containsCount = 0;
@@ -345,7 +425,66 @@ public final class Polygon2D {
       }
       return res;
     }
-    
+
+    /** Returns true if the triangle crosses any edge in this edge subtree */
+    boolean crossesTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+      // compute bounding box of triangle
+      double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
+      double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
+      double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
+      double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
+
+      if (minLat <= max) {
+        double dy = lat1;
+        double ey = lat2;
+        double dx = lon1;
+        double ex = lon2;
+
+        // optimization: if this edge lies entirely outside the triangle's bounding box,
+        // don't waste our time trying more complicated stuff
+        boolean outside = (dy < minLat && ey < minLat) ||
+            (dy > maxLat && ey > maxLat) ||
+            (dx < minLon && ex < minLon) ||
+            (dx > maxLon && ex > maxLon);
+
+        if (outside == false) {
+          // does triangle's first edge intersect polyline?
+          // ax, ay -> bx, by
+          if (orient(dx, dy, ex, ey, ax, ay) * orient(dx, dy, ex, ey, bx, by) <= 0 &&
+              orient(ax, ay, bx, by, dx, dy) * orient(ax, ay, bx, by, ex, ey) <= 0) {
+            return true;
+          }
+
+          // does triangle's second edge intersect polyline?
+          // bx, by -> cx, cy
+          if (orient(dx, dy, ex, ey, bx, by) * orient(dx, dy, ex, ey, cx, cy) <= 0 &&
+              orient(bx, by, cx, cy, dx, dy) * orient(bx, by, cx, cy, ex, ey) <= 0) {
+            return true;
+          }
+
+          // does triangle's third edge intersect polyline?
+          // cx, cy -> ax, ay
+          if (orient(dx, dy, ex, ey, cx, cy) * orient(dx, dy, ex, ey, ax, ay) <= 0 &&
+              orient(cx, cy, ax, ay, dx, dy) * orient(cx, cy, ax, ay, ex, ey) <= 0) {
+            return true;
+          }
+        }
+
+        if (left != null) {
+          if (left.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+            return true;
+          }
+        }
+
+        if (right != null && maxLat >= low) {
+          if (right.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+            return true;
+          }
+        }
+      }
+      return false;
+    }
+
     /** Returns true if the box crosses any edge in this edge subtree */
     boolean crosses(double minLat, double maxLat, double minLon, double maxLon) {
       // we just have to cross one edge to answer the question, so we descend the tree and return when we do.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/core/src/test/org/apache/lucene/geo/TestPolygon2D.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/geo/TestPolygon2D.java b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon2D.java
index 31a42c0..053f008 100644
--- a/lucene/core/src/test/org/apache/lucene/geo/TestPolygon2D.java
+++ b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon2D.java
@@ -16,10 +16,13 @@
  */
 package org.apache.lucene.geo;
 
+import static org.apache.lucene.geo.GeoTestUtil.createRegularPolygon;
 import static org.apache.lucene.geo.GeoTestUtil.nextLatitude;
 import static org.apache.lucene.geo.GeoTestUtil.nextLongitude;
+import static org.apache.lucene.geo.GeoTestUtil.nextPointNear;
 import static org.apache.lucene.geo.GeoTestUtil.nextPolygon;
 
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -289,4 +292,44 @@ public class TestPolygon2D extends LuceneTestCase {
       }
     }
   }
+
+  // targets the polygon directly
+  public void testRelateTriangle() {
+    for (int i = 0; i < 100; ++i) {
+      Polygon polygon = nextPolygon();
+      Polygon2D impl = Polygon2D.create(polygon);
+
+      for (int j = 0; j < 100; j++) {
+        double[] a = nextPointNear(polygon);
+        double[] b = nextPointNear(polygon);
+        double[] c = nextPointNear(polygon);
+
+        // if any corner is within the poly, then the triangle must intersect it
+        if (impl.contains(a[0], a[1]) || impl.contains(b[0], b[1]) || impl.contains(c[0], c[1])) {
+          assertTrue(impl.relateTriangle(a[1], a[0], b[1], b[0], c[1], c[0]) != Relation.CELL_OUTSIDE_QUERY);
+        }
+      }
+    }
+  }
+
+  // tests a simple tessellation of a regular polygon directly against the polygon
+  public void testRelateTriangleEdgeCases() {
+    for (int i = 0; i < 100; ++i) {
+      // random radius between 1Km and 100Km
+      int randomRadius = RandomNumbers.randomIntBetween(random(), 1000, 100000);
+      // random number of vertices
+      int numVertices = RandomNumbers.randomIntBetween(random(), 100, 1000);
+      Polygon polygon = createRegularPolygon(0, 0, randomRadius, numVertices);
+      Polygon2D impl = Polygon2D.create(polygon);
+
+      // create and test a simple tessellation
+      for (int j = 1; j < numVertices; ++j) {
+        double[] a = new double[] {0d, 0d};  // center of poly
+        double[] b = new double[] {polygon.getPolyLat(j - 1), polygon.getPolyLon(j - 1)};
+        // occasionally test pancake (degenerate) triangles
+        double[] c = random().nextBoolean() ? new double[] {polygon.getPolyLat(j), polygon.getPolyLon(j)} : new double[] {a[0], a[1]};
+        assertTrue(impl.relateTriangle(a[0], a[1], b[0], b[1], c[0], c[1]) != Relation.CELL_OUTSIDE_QUERY);
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
index eabc326..28c95e4 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
@@ -80,6 +80,10 @@ public class LatLonShape {
     return new LatLonShapeBoundingBoxQuery(field, minLatitude, maxLatitude, minLongitude, maxLongitude);
   }
 
+  /** create a query to find all polygons that intersect the provided polygon(s) */
+  public static Query newPolygonQuery(String field, Polygon... polygons) {
+    return new LatLonShapePolygonQuery(field, polygons);
+  }
+
   /** polygons are decomposed into tessellated triangles using {@link org.apache.lucene.geo.Tessellator}
    * these triangles are encoded and inserted as separate indexed POINT fields
    */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
new file mode 100644
index 0000000..9a9b890
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
@@ -0,0 +1,271 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Objects;
+
+import org.apache.lucene.geo.GeoEncodingUtils;
+import org.apache.lucene.geo.Polygon;
+import org.apache.lucene.geo.Polygon2D;
+import org.apache.lucene.geo.Rectangle;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.index.PointValues.IntersectVisitor;
+import org.apache.lucene.index.PointValues.Relation;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.ScorerSupplier;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.DocIdSetBuilder;
+import org.apache.lucene.util.FutureArrays;
+import org.apache.lucene.util.NumericUtils;
+
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
+
+/**
+ * Finds all previously indexed shapes that intersect the specified arbitrary polygons.
+ *
+ * <p>The field must be indexed using
+ * {@link org.apache.lucene.document.LatLonShape#createIndexableFields(String, Polygon)}, with the resulting fields added per document.
+ *
+ *  @lucene.experimental
+ **/
+public class LatLonShapePolygonQuery extends Query {
+  final String field;
+  final Polygon[] polygons;
+
+
+  public LatLonShapePolygonQuery(String field, Polygon... polygons) {
+    if (field == null) {
+      throw new IllegalArgumentException("field must not be null");
+    }
+    if (polygons == null) {
+      throw new IllegalArgumentException("polygons must not be null");
+    }
+    if (polygons.length == 0) {
+      throw new IllegalArgumentException("polygons must not be empty");
+    }
+    for (int i = 0; i < polygons.length; i++) {
+      if (polygons[i] == null) {
+        throw new IllegalArgumentException("polygon[" + i + "] must not be null");
+      }
+    }
+    this.field = field;
+    this.polygons = polygons.clone();
+  }
+
+  @Override
+  public final Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+    final Rectangle box = Rectangle.fromPolygon(polygons);
+    final byte minLat[] = new byte[Integer.BYTES];
+    final byte maxLat[] = new byte[Integer.BYTES];
+    final byte minLon[] = new byte[Integer.BYTES];
+    final byte maxLon[] = new byte[Integer.BYTES];
+    NumericUtils.intToSortableBytes(encodeLatitudeCeil(box.minLat), minLat, 0);
+    NumericUtils.intToSortableBytes(encodeLatitude(box.maxLat), maxLat, 0);
+    NumericUtils.intToSortableBytes(encodeLongitudeCeil(box.minLon), minLon, 0);
+    NumericUtils.intToSortableBytes(encodeLongitude(box.maxLon), maxLon, 0);
+
+    final Polygon2D polygon = Polygon2D.create(polygons);
+
+    return new ConstantScoreWeight(this, boost) {
+
+      private Relation relateRangeToQuery(byte[] minTriangle, byte[] maxTriangle) {
+        // compute bounding box
+        int minXOfs = 0;
+        int minYOfs = 0;
+        int maxXOfs = 0;
+        int maxYOfs = 0;
+        for (int d = 1; d < 3; ++d) {
+          // check minX
+          int aOfs = (minXOfs * 2 * LatLonPoint.BYTES) + LatLonPoint.BYTES;
+          int bOfs = (d * 2 * LatLonPoint.BYTES) + LatLonPoint.BYTES;
+          if (FutureArrays.compareUnsigned(minTriangle, bOfs, bOfs + LatLonPoint.BYTES, minTriangle, aOfs, aOfs + LatLonPoint.BYTES) < 0) {
+            minXOfs = d;
+          }
+          // check maxX
+          aOfs = (maxXOfs * 2 * LatLonPoint.BYTES) + LatLonPoint.BYTES;
+          if (FutureArrays.compareUnsigned(maxTriangle, bOfs, bOfs + LatLonPoint.BYTES, maxTriangle, aOfs, aOfs + LatLonPoint.BYTES) > 0) {
+            maxXOfs = d;
+          }
+          // check minY
+          aOfs = minYOfs * 2 * LatLonPoint.BYTES;
+          bOfs = d * 2 * LatLonPoint.BYTES;
+          if (FutureArrays.compareUnsigned(minTriangle, bOfs, bOfs + LatLonPoint.BYTES, minTriangle, aOfs, aOfs + LatLonPoint.BYTES) < 0) {
+            minYOfs = d;
+          }
+          // check maxY
+          aOfs = maxYOfs * 2 * LatLonPoint.BYTES;
+          if (FutureArrays.compareUnsigned(maxTriangle, bOfs, bOfs + LatLonPoint.BYTES, maxTriangle, aOfs, aOfs + LatLonPoint.BYTES) > 0) {
+            maxYOfs = d;
+          }
+        }
+        minXOfs = (minXOfs * 2 * LatLonPoint.BYTES) + LatLonPoint.BYTES;
+        maxXOfs = (maxXOfs * 2 * LatLonPoint.BYTES) + LatLonPoint.BYTES;
+        minYOfs *= 2 * LatLonPoint.BYTES;
+        maxYOfs *= 2 * LatLonPoint.BYTES;
+
+        double minLat = GeoEncodingUtils.decodeLatitude(minTriangle, minYOfs);
+        double minLon = GeoEncodingUtils.decodeLongitude(minTriangle, minXOfs);
+        double maxLat = GeoEncodingUtils.decodeLatitude(maxTriangle, maxYOfs);
+        double maxLon = GeoEncodingUtils.decodeLongitude(maxTriangle, maxXOfs);
+
+        // check internal node against query
+        return polygon.relate(minLat, maxLat, minLon, maxLon);
+      }
+
+      private boolean queryCrossesTriangle(byte[] t) {
+        double ay = GeoEncodingUtils.decodeLatitude(t, 0);
+        double ax = GeoEncodingUtils.decodeLongitude(t, LatLonPoint.BYTES);
+        double by = GeoEncodingUtils.decodeLatitude(t, 2 * LatLonPoint.BYTES);
+        double bx = GeoEncodingUtils.decodeLongitude(t, 3 * LatLonPoint.BYTES);
+        double cy = GeoEncodingUtils.decodeLatitude(t, 4 * LatLonPoint.BYTES);
+        double cx = GeoEncodingUtils.decodeLongitude(t, 5 * LatLonPoint.BYTES);
+        return polygon.relateTriangle(ax, ay, bx, by, cx, cy) != Relation.CELL_OUTSIDE_QUERY;
+      }
+
+      private IntersectVisitor getIntersectVisitor(DocIdSetBuilder result) {
+        return new IntersectVisitor() {
+
+          DocIdSetBuilder.BulkAdder adder;
+
+          @Override
+          public void grow(int count) {
+            adder = result.grow(count);
+          }
+
+          @Override
+          public void visit(int docID) throws IOException {
+            adder.add(docID);
+          }
+
+          @Override
+          public void visit(int docID, byte[] t) throws IOException {
+            if (queryCrossesTriangle(t)) {
+              adder.add(docID);
+            }
+          }
+
+          @Override
+          public Relation compare(byte[] minTriangle, byte[] maxTriangle) {
+            return relateRangeToQuery(minTriangle, maxTriangle);
+          }
+        };
+      }
+
+      @Override
+      public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
+        LeafReader reader = context.reader();
+        PointValues values = reader.getPointValues(field);
+        if (values == null) {
+          // No docs in this segment had any points fields
+          return null;
+        }
+        FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
+        if (fieldInfo == null) {
+          // No docs in this segment indexed this field at all
+          return null;
+        }
+
+        final Weight weight = this;
+        return new ScorerSupplier() {
+          final DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
+          final PointValues.IntersectVisitor visitor = getIntersectVisitor(result);
+          long cost = -1;
+
+          @Override
+          public Scorer get(long leadCost) throws IOException {
+            values.intersect(visitor);
+            DocIdSetIterator iterator = result.build().iterator();
+            return new ConstantScoreScorer(weight, score(), iterator);
+          }
+
+          @Override
+          public long cost() {
+            if (cost == -1) {
+              // Computing the cost may be expensive, so only do it if necessary
+              cost = values.estimatePointCount(visitor);
+              assert cost >= 0;
+            }
+            return cost;
+          }
+        };
+      }
+
+      @Override
+      public Scorer scorer(LeafReaderContext context) throws IOException {
+        ScorerSupplier scorerSupplier = scorerSupplier(context);
+        if (scorerSupplier == null) {
+          return null;
+        }
+        return scorerSupplier.get(Long.MAX_VALUE);
+      }
+
+      @Override
+      public boolean isCacheable(LeafReaderContext ctx) {
+        return true;
+      }
+    };
+  }
+
+  public String getField() {
+    return field;
+  }
+
+  @Override
+  public String toString(String field) {
+    final StringBuilder sb = new StringBuilder();
+    sb.append(getClass().getSimpleName());
+    sb.append(':');
+    if (this.field.equals(field) == false) {
+      sb.append(" field=");
+      sb.append(this.field);
+      sb.append(':');
+    }
+    sb.append("Polygon(" + polygons[0].toGeoJSON() + ")");
+    return sb.toString();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return sameClassAs(o) && equalsTo(getClass().cast(o));
+  }
+
+  private boolean equalsTo(LatLonShapePolygonQuery o) {
+    return Objects.equals(field, o.field) && Arrays.equals(polygons, o.polygons);
+  }
+
+  @Override
+  public int hashCode() {
+    int hash = classHash();
+    hash = 31 * hash + field.hashCode();
+    hash = 31 * hash + Arrays.hashCode(polygons);
+    return hash;
+  }
+}
\ No newline at end of file
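
For readers following the offset arithmetic above: queryCrossesTriangle() decodes a packed triangle as six LatLonPoint.BYTES-wide encoded integers, one latitude/longitude pair per vertex with latitude first, which is why the min/max offsets advance in multiples of 2 * LatLonPoint.BYTES. A minimal decoding sketch (decodeTriangle is a hypothetical helper; the GeoEncodingUtils calls are the same ones used above):

  import org.apache.lucene.document.LatLonPoint;
  import org.apache.lucene.geo.GeoEncodingUtils;

  /** Sketch: decode the three (lat, lon) vertices of one packed triangle. */
  static double[][] decodeTriangle(byte[] t) {
    double[][] v = new double[3][2];
    for (int i = 0; i < 3; i++) {
      int base = i * 2 * LatLonPoint.BYTES;
      v[i][0] = GeoEncodingUtils.decodeLatitude(t, base);                      // latitude first
      v[i][1] = GeoEncodingUtils.decodeLongitude(t, base + LatLonPoint.BYTES); // then longitude
    }
    return v;
  }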

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
new file mode 100644
index 0000000..25d4888
--- /dev/null
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
@@ -0,0 +1,393 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.geo.GeoTestUtil;
+import org.apache.lucene.geo.Polygon;
+import org.apache.lucene.geo.Polygon2D;
+import org.apache.lucene.geo.Rectangle;
+import org.apache.lucene.geo.Tessellator;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.PointValues.Relation;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
+
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
+
+/** Base test case for {@link LatLonShape} indexing and search */
+public class TestLatLonPolygonShapeQueries extends LuceneTestCase {
+  protected static final String FIELD_NAME = "shape";
+
+  private Polygon quantizePolygon(Polygon polygon) {
+    double[] lats = new double[polygon.numPoints()];
+    double[] lons = new double[polygon.numPoints()];
+    for (int i = 0; i < lats.length; ++i) {
+      lats[i] = quantizeLat(polygon.getPolyLat(i));
+      lons[i] = quantizeLon(polygon.getPolyLon(i));
+    }
+    return new Polygon(lats, lons);
+  }
+
+  protected double quantizeLat(double rawLat) {
+    return decodeLatitude(encodeLatitude(rawLat));
+  }
+
+  protected double quantizeLatCeil(double rawLat) {
+    return decodeLatitude(encodeLatitudeCeil(rawLat));
+  }
+
+  protected double quantizeLon(double rawLon) {
+    return decodeLongitude(encodeLongitude(rawLon));
+  }
+
+  protected double quantizeLonCeil(double rawLon) {
+    return decodeLongitude(encodeLongitudeCeil(rawLon));
+  }
+
+  protected void addPolygonsToDoc(String field, Document doc, Polygon polygon) {
+    Field[] fields = LatLonShape.createIndexableFields(field, polygon);
+    for (Field f : fields) {
+      doc.add(f);
+    }
+  }
+
+  protected Query newRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) {
+    return LatLonShape.newBoxQuery(field, minLat, maxLat, minLon, maxLon);
+  }
+
+  protected Query newPolygonQuery(String field, Polygon... polygons) {
+    return LatLonShape.newPolygonQuery(field, polygons);
+  }
+
+  public void testRandomTiny() throws Exception {
+    // Make sure single-leaf-node case is OK:
+    doTestRandom(10);
+  }
+
+  public void testRandomMedium() throws Exception {
+    doTestRandom(10000);
+  }
+
+  @Nightly
+  public void testRandomBig() throws Exception {
+    doTestRandom(50000);
+  }
+
+  private void doTestRandom(int count) throws Exception {
+    int numPolygons = atLeast(count);
+
+    if (VERBOSE) {
+      System.out.println("TEST: numPolygons=" + numPolygons);
+    }
+
+    Polygon[] polygons = new Polygon[numPolygons];
+    for (int id = 0; id < numPolygons; ++id) {
+      int x = random().nextInt(20);
+      if (x == 17) {
+        polygons[id] = null;
+        if (VERBOSE) {
+          System.out.println("  id=" + id + " is missing");
+        }
+      } else {
+        // create a polygon that does not cross the dateline
+        polygons[id] = GeoTestUtil.nextPolygon();
+      }
+    }
+    verify(polygons);
+  }
+
+  private void verify(Polygon... polygons) throws Exception {
+    ArrayList<Polygon2D> poly2d = new ArrayList<>();
+    poly2d.ensureCapacity(polygons.length);
+    // index random polygons; poly2d will contain the Polygon2D objects needed for verification
+    IndexWriter w = indexRandomPolygons(poly2d, polygons);
+    Directory dir = w.getDirectory();
+    final IndexReader reader = DirectoryReader.open(w);
+    // test random bbox queries
+    verifyRandomBBoxQueries(reader, poly2d, polygons);
+    // test random polygon queries
+    verifyRandomPolygonQueries(reader, poly2d, polygons);
+    IOUtils.close(w, reader, dir);
+  }
+
+  protected IndexWriter indexRandomPolygons(List<Polygon2D> poly2d, Polygon... polygons) throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.setMergeScheduler(new SerialMergeScheduler());
+    int mbd = iwc.getMaxBufferedDocs();
+    if (mbd != -1 && mbd < polygons.length / 100) {
+      iwc.setMaxBufferedDocs(polygons.length / 100);
+    }
+    Directory dir;
+    if (polygons.length > 1000) {
+      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
+    } else {
+      dir = newDirectory();
+    }
+
+    Set<Integer> deleted = new HashSet<>();
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for (int id = 0; id < polygons.length; ++id) {
+      Document doc = new Document();
+      doc.add(newStringField("id", "" + id, Field.Store.NO));
+      doc.add(new NumericDocValuesField("id", id));
+      if (polygons[id] != null) {
+        try {
+          addPolygonsToDoc(FIELD_NAME, doc, polygons[id]);
+        } catch (IllegalArgumentException e) {
+          // GeoTestUtil will occasionally create invalid polygons;
+          // invalid polygons will not tessellate.
+          // We skip polygons that will not tessellate, relying on the TestTessellator class
+          // to ensure the Tessellator correctly identified a malformed shape and it's not a bug.
+          if (VERBOSE) {
+            System.out.println("  id=" + id + " could not tessellate. Malformed shape " + polygons[id] + " detected");
+          }
+          // remove and skip the malformed shape
+          polygons[id] = null;
+          poly2d.add(id, null);
+          continue;
+        }
+        poly2d.add(id, Polygon2D.create(quantizePolygon(polygons[id])));
+      } else {
+        poly2d.add(id, null);
+      }
+      w.addDocument(doc);
+      if (id > 0 && random().nextInt(100) == 42) {
+        int idToDelete = random().nextInt(id);
+        w.deleteDocuments(new Term("id", ""+idToDelete));
+        deleted.add(idToDelete);
+        if (VERBOSE) {
+          System.out.println("   delete id=" + idToDelete);
+        }
+      }
+    }
+
+    if (random().nextBoolean()) {
+      w.forceMerge(1);
+    }
+
+    return w;
+  }
+
+  protected void verifyRandomBBoxQueries(IndexReader reader, List<Polygon2D> poly2d, Polygon... polygons) throws Exception {
+    IndexSearcher s = newSearcher(reader);
+
+    final int iters = atLeast(75);
+
+    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
+    int maxDoc = s.getIndexReader().maxDoc();
+
+    for (int iter = 0; iter < iters; ++iter) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
+      }
+
+      // BBox
+      Rectangle rect = GeoTestUtil.nextBoxNotCrossingDateline();
+      Query query = newRectQuery(FIELD_NAME, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon);
+
+      if (VERBOSE) {
+        System.out.println("  query=" + query);
+      }
+
+      final FixedBitSet hits = new FixedBitSet(maxDoc);
+      s.search(query, new SimpleCollector() {
+
+        private int docBase;
+
+        @Override
+        public ScoreMode scoreMode() {
+          return ScoreMode.COMPLETE_NO_SCORES;
+        }
+
+        @Override
+        protected void doSetNextReader(LeafReaderContext context) throws IOException {
+          docBase = context.docBase;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+          hits.set(docBase+doc);
+        }
+      });
+
+      boolean fail = false;
+      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
+      for (int docID = 0; docID < maxDoc; ++docID) {
+        assertEquals(docID, docIDToID.nextDoc());
+        int id = (int) docIDToID.longValue();
+        boolean expected;
+        if (liveDocs != null && liveDocs.get(docID) == false) {
+          // document is deleted
+          expected = false;
+        } else if (polygons[id] == null) {
+          expected = false;
+        } else {
+          // check quantized poly against quantized query
+          expected = poly2d.get(id).relate(quantizeLatCeil(rect.minLat), quantizeLat(rect.maxLat),
+              quantizeLonCeil(rect.minLon), quantizeLon(rect.maxLon)) != Relation.CELL_OUTSIDE_QUERY;
+        }
+
+        if (hits.get(docID) != expected) {
+          StringBuilder b = new StringBuilder();
+
+          if (expected) {
+            b.append("FAIL: id=" + id + " should match but did not\n");
+          } else {
+            b.append("FAIL: id=" + id + " should not match but did\n");
+          }
+          b.append("  query=" + query + " docID=" + docID + "\n");
+          b.append("  polygon=" + quantizePolygon(polygons[id]) + "\n");
+          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
+          b.append("  rect=Rectangle(" + quantizeLatCeil(rect.minLat) + " TO " + quantizeLat(rect.maxLat) + " lon=" + quantizeLonCeil(rect.minLon) + " TO " + quantizeLon(rect.maxLon) + ")");
+          if (true) {
+            fail("wrong hit (first of possibly more):\n\n" + b);
+          } else {
+            System.out.println(b.toString());
+            fail = true;
+          }
+        }
+      }
+      if (fail) {
+        fail("some hits were wrong");
+      }
+    }
+  }
+
+  protected void verifyRandomPolygonQueries(IndexReader reader, List<Polygon2D> poly2d, Polygon... polygons) throws Exception {
+    IndexSearcher s = newSearcher(reader);
+
+    final int iters = atLeast(75);
+
+    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
+    int maxDoc = s.getIndexReader().maxDoc();
+
+    for (int iter = 0; iter < iters; ++iter) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
+      }
+
+      // Polygon
+      Polygon queryPolygon = GeoTestUtil.nextPolygon();
+      Polygon2D queryPoly2D = Polygon2D.create(queryPolygon);
+      Query query = newPolygonQuery(FIELD_NAME, queryPolygon);
+
+      if (VERBOSE) {
+        System.out.println("  query=" + query);
+      }
+
+      final FixedBitSet hits = new FixedBitSet(maxDoc);
+      s.search(query, new SimpleCollector() {
+
+        private int docBase;
+
+        @Override
+        public ScoreMode scoreMode() {
+          return ScoreMode.COMPLETE_NO_SCORES;
+        }
+
+        @Override
+        protected void doSetNextReader(LeafReaderContext context) throws IOException {
+          docBase = context.docBase;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+          hits.set(docBase+doc);
+        }
+      });
+
+      boolean fail = false;
+      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
+      for (int docID = 0; docID < maxDoc; ++docID) {
+        assertEquals(docID, docIDToID.nextDoc());
+        int id = (int) docIDToID.longValue();
+        boolean expected;
+        if (liveDocs != null && liveDocs.get(docID) == false) {
+          // document is deleted
+          expected = false;
+        } else if (polygons[id] == null) {
+          expected = false;
+        } else {
+          expected = false;
+          try {
+            // check poly (quantized the same way as indexed) against query polygon
+            List<Tessellator.Triangle> tesselation = Tessellator.tessellate(quantizePolygon(polygons[id]));
+            for (Tessellator.Triangle t : tesselation) {
+              if (queryPoly2D.relateTriangle(t.getLon(0), t.getLat(0),
+                  t.getLon(1), t.getLat(1), t.getLon(2), t.getLat(2)) != Relation.CELL_OUTSIDE_QUERY) {
+                expected = true;
+                break;
+              }
+            }
+          } catch (IllegalArgumentException e) {
+            continue;
+          }
+        }
+
+        if (hits.get(docID) != expected) {
+          StringBuilder b = new StringBuilder();
+
+          if (expected) {
+            b.append("FAIL: id=" + id + " should match but did not\n");
+          } else {
+            b.append("FAIL: id=" + id + " should not match but did\n");
+          }
+          b.append("  query=" + query + " docID=" + docID + "\n");
+          b.append("  polygon=" + quantizePolygon(polygons[id]).toGeoJSON() + "\n");
+          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
+          b.append("  queryPolygon=" + queryPolygon.toGeoJSON());
+          if (true) {
+            fail("wrong hit (first of possibly more):\n\n" + b);
+          } else {
+            System.out.println(b.toString());
+            fail = true;
+          }
+        }
+      }
+      if (fail) {
+        fail("some hits were wrong");
+      }
+    }
+  }
+}
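
Taken together, the helpers above boil down to a small public surface. A minimal end-to-end sketch using only the LatLonShape calls exercised by this test; the class name, directory path and coordinates are illustrative:

  import java.nio.file.Paths;
  import org.apache.lucene.analysis.standard.StandardAnalyzer;
  import org.apache.lucene.document.Document;
  import org.apache.lucene.document.Field;
  import org.apache.lucene.document.LatLonShape;
  import org.apache.lucene.geo.Polygon;
  import org.apache.lucene.index.DirectoryReader;
  import org.apache.lucene.index.IndexWriter;
  import org.apache.lucene.index.IndexWriterConfig;
  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.store.Directory;
  import org.apache.lucene.store.FSDirectory;

  public class LatLonShapeExample {
    public static void main(String[] args) throws Exception {
      Directory dir = FSDirectory.open(Paths.get("/tmp/shape-index"));
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));

      // a closed square; the first and last points must be identical
      Polygon square = new Polygon(new double[] {0, 0, 10, 10, 0},
                                   new double[] {0, 10, 10, 0, 0});
      Document doc = new Document();
      for (Field f : LatLonShape.createIndexableFields("shape", square)) {
        doc.add(f); // one field per tessellated triangle
      }
      w.addDocument(doc);
      w.commit();

      IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(w));
      Query box = LatLonShape.newBoxQuery("shape", -1, 11, -1, 11);
      Query poly = LatLonShape.newPolygonQuery("shape", square);
      System.out.println(searcher.count(box) + " " + searcher.count(poly)); // both should be 1
      w.close();
      dir.close();
    }
  }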

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/18c2300f/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
deleted file mode 100644
index 2bb207e..0000000
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.document;
-
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.lucene.geo.GeoTestUtil;
-import org.apache.lucene.geo.Polygon;
-import org.apache.lucene.geo.Polygon2D;
-import org.apache.lucene.geo.Rectangle;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiDocValues;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.PointValues.Relation;
-import org.apache.lucene.index.SerialMergeScheduler;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreMode;
-import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-
-import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
-
-/** base Test case for {@link LatLonShape} indexing and search */
-public class TestLatLonShapeQueries extends LuceneTestCase {
-  protected static final String FIELD_NAME = "shape";
-
-  private Polygon quantizePolygon(Polygon polygon) {
-    double[] lats = new double[polygon.numPoints()];
-    double[] lons = new double[polygon.numPoints()];
-    for (int i = 0; i < lats.length; ++i) {
-      lats[i] = quantizeLat(polygon.getPolyLat(i));
-      lons[i] = quantizeLon(polygon.getPolyLon(i));
-    }
-    return new Polygon(lats, lons);
-  }
-
-  protected double quantizeLat(double rawLat) {
-    return decodeLatitude(encodeLatitude(rawLat));
-  }
-
-  protected double quantizeLatCeil(double rawLat) {
-    return decodeLatitude(encodeLatitudeCeil(rawLat));
-  }
-
-  protected double quantizeLon(double rawLon) {
-    return decodeLongitude(encodeLongitude(rawLon));
-  }
-
-  protected double quantizeLonCeil(double rawLon) {
-    return decodeLongitude(encodeLongitudeCeil(rawLon));
-  }
-
-  protected void addPolygonsToDoc(String field, Document doc, Polygon polygon) {
-    Field[] fields = LatLonShape.createIndexableFields(field, polygon);
-    for (Field f : fields) {
-      doc.add(f);
-    }
-  }
-
-  protected Query newRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) {
-    return LatLonShape.newBoxQuery(field, minLat, maxLat, minLon, maxLon);
-  }
-
-  public void testRandomTiny() throws Exception {
-    // Make sure single-leaf-node case is OK:
-    doTestRandom(10);
-  }
-
-  public void testRandomMedium() throws Exception {
-    doTestRandom(10000);
-  }
-
-  @Nightly
-  public void testRandomBig() throws Exception {
-    doTestRandom(50000);
-  }
-
-  private void doTestRandom(int count) throws Exception {
-    int numPolygons = atLeast(count);
-
-    if (VERBOSE) {
-      System.out.println("TEST: numPolygons=" + numPolygons);
-    }
-
-    Polygon[] polygons = new Polygon[numPolygons];
-    for (int id = 0; id < numPolygons; ++id) {
-      int x = random().nextInt(20);
-      if (x == 17) {
-        polygons[id] = null;
-        if (VERBOSE) {
-          System.out.println("  id=" + id + " is missing");
-        }
-      } else {
-        // create a polygon that does not cross the dateline
-        polygons[id] = GeoTestUtil.nextPolygon();
-      }
-    }
-    verify(polygons);
-  }
-
-  private void verify(Polygon... polygons) throws Exception {
-    verifyRandomBBoxes(polygons);
-  }
-
-  protected void verifyRandomBBoxes(Polygon... polygons) throws Exception {
-    IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setMergeScheduler(new SerialMergeScheduler());
-    int mbd = iwc.getMaxBufferedDocs();
-    if (mbd != -1 && mbd < polygons.length / 100) {
-      iwc.setMaxBufferedDocs(polygons.length / 100);
-    }
-    Directory dir;
-    if (polygons.length > 1000) {
-      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
-    } else {
-      dir = newDirectory();
-    }
-
-    Set<Integer> deleted = new HashSet<>();
-    IndexWriter w = new IndexWriter(dir, iwc);
-    Polygon2D[] poly2D = new Polygon2D[polygons.length];
-    for (int id = 0; id < polygons.length; ++id) {
-      Document doc = new Document();
-      doc.add(newStringField("id", "" + id, Field.Store.NO));
-      doc.add(new NumericDocValuesField("id", id));
-      if (polygons[id] != null) {
-        try {
-          addPolygonsToDoc(FIELD_NAME, doc, polygons[id]);
-        } catch (IllegalArgumentException e) {
-          // GeoTestUtil will occassionally create invalid polygons
-          // invalid polygons will not tessellate
-          // we skip those polygons that will not tessellate, relying on the TestTessellator class
-          // to ensure the Tessellator correctly identified a malformed shape and its not a bug
-          if (VERBOSE) {
-            System.out.println("  id=" + id + " could not tessellate. Malformed shape " + polygons[id] + " detected");
-          }
-          // remove and skip the malformed shape
-          polygons[id] = null;
-          continue;
-        }
-        poly2D[id] = Polygon2D.create(quantizePolygon(polygons[id]));
-      }
-      w.addDocument(doc);
-      if (id > 0 && random().nextInt(100) == 42) {
-        int idToDelete = random().nextInt(id);
-        w.deleteDocuments(new Term("id", ""+idToDelete));
-        deleted.add(idToDelete);
-        if (VERBOSE) {
-          System.out.println("   delete id=" + idToDelete);
-        }
-      }
-    }
-
-    if (random().nextBoolean()) {
-      w.forceMerge(1);
-    }
-    final IndexReader r = DirectoryReader.open(w);
-    w.close();
-
-    IndexSearcher s = newSearcher(r);
-
-    final int iters = atLeast(75);
-
-    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
-    int maxDoc = s.getIndexReader().maxDoc();
-
-    for (int iter = 0; iter < iters; ++iter) {
-      if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
-      }
-
-      // BBox
-      Rectangle rect = GeoTestUtil.nextBoxNotCrossingDateline();
-      Query query = newRectQuery(FIELD_NAME, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon);
-
-      if (VERBOSE) {
-        System.out.println("  query=" + query);
-      }
-
-      final FixedBitSet hits = new FixedBitSet(maxDoc);
-      s.search(query, new SimpleCollector() {
-
-        private int docBase;
-
-        @Override
-        public ScoreMode scoreMode() {
-          return ScoreMode.COMPLETE_NO_SCORES;
-        }
-
-        @Override
-        protected void doSetNextReader(LeafReaderContext context) throws IOException {
-          docBase = context.docBase;
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-          hits.set(docBase+doc);
-        }
-      });
-
-      boolean fail = false;
-      NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id");
-      for (int docID = 0; docID < maxDoc; ++docID) {
-        assertEquals(docID, docIDToID.nextDoc());
-        int id = (int) docIDToID.longValue();
-        boolean expected;
-        if (liveDocs != null && liveDocs.get(docID) == false) {
-          // document is deleted
-          expected = false;
-        } else if (polygons[id] == null) {
-          expected = false;
-        } else {
-          // check quantized poly against quantized query
-          expected = poly2D[id].relate(quantizeLatCeil(rect.minLat), quantizeLat(rect.maxLat),
-              quantizeLonCeil(rect.minLon), quantizeLon(rect.maxLon)) != Relation.CELL_OUTSIDE_QUERY;
-        }
-
-        if (hits.get(docID) != expected) {
-          StringBuilder b = new StringBuilder();
-
-          if (expected) {
-            b.append("FAIL: id=" + id + " should match but did not\n");
-          } else {
-            b.append("FAIL: id=" + id + " should not match but did\n");
-          }
-          b.append("  query=" + query + " docID=" + docID + "\n");
-          b.append("  polygon=" + quantizePolygon(polygons[id]) + "\n");
-          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
-          b.append("  rect=Rectangle(" + quantizeLatCeil(rect.minLat) + " TO " + quantizeLat(rect.maxLat) + " lon=" + quantizeLonCeil(rect.minLon) + " TO " + quantizeLon(rect.maxLon) + ")");
-          if (true) {
-            fail("wrong hit (first of possibly more):\n\n" + b);
-          } else {
-            System.out.println(b.toString());
-            fail = true;
-          }
-        }
-      }
-      if (fail) {
-        fail("some hits were wrong");
-      }
-    }
-    IOUtils.close(r, dir);
-  }
-}


[48/48] lucene-solr:jira/http2: Merge branch 'master' into jira/http2

Posted by da...@apache.org.
Merge branch 'master' into jira/http2


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d6d6fbdb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d6d6fbdb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d6d6fbdb

Branch: refs/heads/jira/http2
Commit: d6d6fbdbeecaf6109f59d5309fea2dd8e24d83c7
Parents: 8b20877 3b15be3
Author: Cao Manh Dat <da...@apache.org>
Authored: Mon Aug 6 11:13:13 2018 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Mon Aug 6 11:13:13 2018 +0700

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  14 +
 lucene/MIGRATE.txt                              |   8 +-
 .../src/java/org/apache/lucene/geo/Polygon.java |  30 +
 .../java/org/apache/lucene/geo/Polygon2D.java   | 147 +++-
 .../org/apache/lucene/index/CheckIndex.java     |  39 -
 .../lucene/index/DefaultIndexingChain.java      |  49 ++
 .../index/SortingStoredFieldsConsumer.java      |   6 +-
 .../index/SortingTermVectorsConsumer.java       |   6 +-
 .../org/apache/lucene/search/IndexSearcher.java |  23 +-
 .../apache/lucene/search/ReqOptSumScorer.java   |   4 +-
 .../org/apache/lucene/search/TermQuery.java     |   2 +-
 .../org/apache/lucene/geo/TestPolygon2D.java    |  43 ++
 .../apache/lucene/index/TestIndexSorting.java   |  40 +
 .../lucene/index/TestIndexWriterReader.java     |  15 +-
 .../lucene/index/TestTieredMergePolicy.java     |   2 +-
 .../index/TestUpgradeIndexMergePolicy.java      |   8 +-
 .../org/apache/lucene/search/TestBoolean2.java  |   5 +-
 .../apache/lucene/search/TestBooleanScorer.java |   2 +-
 .../apache/lucene/search/TestLRUQueryCache.java |   8 +-
 .../apache/lucene/search/TestNeedsScores.java   |   2 +-
 .../lucene/search/TestShardSearching.java       |   2 +-
 .../apache/lucene/search/TestTopDocsMerge.java  |   2 +-
 lucene/ivy-versions.properties                  |   1 -
 .../search/join/ToChildBlockJoinQuery.java      |   2 +-
 .../search/join/ToParentBlockJoinQuery.java     |   8 +-
 .../org/apache/lucene/document/LatLonShape.java |  71 +-
 .../document/LatLonShapeBoundingBoxQuery.java   |   4 +-
 .../document/LatLonShapePolygonQuery.java       | 341 +++++++++
 .../src/java/org/apache/lucene/geo/Line.java    | 139 ++++
 .../document/BaseLatLonShapeTestCase.java       | 473 ++++++++++++
 .../document/TestLatLonLineShapeQueries.java    |  94 +++
 .../document/TestLatLonPointShapeQueries.java   |  70 ++
 .../document/TestLatLonPolygonShapeQueries.java |  82 ++
 .../apache/lucene/document/TestLatLonShape.java |  31 +-
 .../lucene/document/TestLatLonShapeQueries.java | 276 -------
 .../org/apache/lucene/util/LuceneTestCase.java  |  54 +-
 .../java/org/apache/lucene/util/TestUtil.java   |  18 +-
 .../src/groovy/check-source-patterns.groovy     |   3 +
 solr/CHANGES.txt                                |  34 +
 .../org/apache/solr/ltr/TestLTROnSolrCloud.java |   2 +
 solr/contrib/velocity/ivy.xml                   |   1 -
 .../cloud/api/collections/AddReplicaCmd.java    |   2 +-
 .../cloud/api/collections/SplitShardCmd.java    | 154 ++--
 .../solr/cloud/overseer/ReplicaMutator.java     |  47 +-
 .../org/apache/solr/core/ConfigSetService.java  |   8 +-
 .../org/apache/solr/core/CoreContainer.java     |   4 +-
 .../apache/solr/core/CorePropertiesLocator.java |  22 +-
 .../org/apache/solr/handler/GraphHandler.java   |   4 +-
 .../org/apache/solr/handler/SQLHandler.java     |   4 +-
 .../solr/handler/SolrDefaultStreamFactory.java  |  53 ++
 .../org/apache/solr/handler/StreamHandler.java  |  15 +-
 .../solr/handler/admin/AdminHandlersProxy.java  | 128 ++++
 .../solr/handler/admin/CollectionsHandler.java  |   6 +-
 .../solr/handler/admin/MetricsHandler.java      |  13 +-
 .../handler/admin/MetricsHistoryHandler.java    |  11 +-
 .../org/apache/solr/handler/admin/SplitOp.java  |  24 +-
 .../solr/handler/admin/SystemInfoHandler.java   |  11 +-
 .../apache/solr/handler/sql/SolrEnumerator.java |   4 +-
 .../apache/solr/metrics/SolrMetricManager.java  |  14 +
 .../metrics/reporters/SolrSlf4jReporter.java    |  63 +-
 .../reporters/solr/SolrClusterReporter.java     |  11 +-
 .../solr/metrics/rrd/SolrRrdBackendFactory.java |   9 +-
 .../java/org/apache/solr/search/Grouping.java   |   8 +-
 .../apache/solr/search/HashQParserPlugin.java   |   4 +-
 .../solr/search/grouping/CommandHandler.java    |   4 +-
 .../solr/store/hdfs/HdfsLocalityReporter.java   |   8 +-
 .../solr/update/DirectUpdateHandler2.java       |   5 +-
 .../apache/solr/update/SolrIndexSplitter.java   | 469 +++++++++++-
 .../apache/solr/update/SplitIndexCommand.java   |  22 +-
 .../java/org/apache/solr/update/UpdateLog.java  |   6 +
 .../processor/DistributedUpdateProcessor.java   |  12 +-
 .../src/test-files/solr/solr-slf4jreporter.xml  |   7 +
 .../org/apache/solr/TestDistributedSearch.java  |   1 +
 .../org/apache/solr/cloud/AddReplicaTest.java   |   2 +-
 .../cloud/ChaosMonkeyNothingIsSafeTest.java     |   2 +
 .../solr/cloud/ChaosMonkeyShardSplitTest.java   |  11 +-
 .../apache/solr/cloud/DeleteReplicaTest.java    |   2 +-
 .../solr/cloud/DocValuesNotIndexedTest.java     |   2 +
 .../cloud/FullSolrCloudDistribCmdsTest.java     |   2 +
 .../cloud/LeaderElectionIntegrationTest.java    |   2 +
 .../solr/cloud/LeaderTragicEventTest.java       |  10 +-
 .../solr/cloud/LeaderVoteWaitTimeoutTest.java   |   2 +-
 .../org/apache/solr/cloud/MoveReplicaTest.java  |   2 +
 .../apache/solr/cloud/OverseerRolesTest.java    |   2 +-
 .../solr/cloud/PeerSyncReplicationTest.java     |   2 +-
 .../org/apache/solr/cloud/RecoveryZkTest.java   |   2 +-
 .../apache/solr/cloud/RollingRestartTest.java   |   3 +-
 .../apache/solr/cloud/TestCloudConsistency.java |   3 +-
 .../apache/solr/cloud/TestCloudPivotFacet.java  |   2 +-
 .../solr/cloud/TestMiniSolrCloudClusterSSL.java |   4 +-
 .../AbstractCloudBackupRestoreTestCase.java     |  18 +-
 .../CollectionsAPIAsyncDistributedZkTest.java   |   3 +-
 .../cloud/api/collections/ShardSplitTest.java   | 157 ++--
 .../collections/TestHdfsCloudBackupRestore.java |   2 +
 .../TestLocalFSCloudBackupRestore.java          |   2 +
 .../AutoAddReplicasIntegrationTest.java         |   2 +
 .../autoscaling/AutoScalingHandlerTest.java     |   2 +-
 .../autoscaling/ComputePlanActionTest.java      |   3 +
 .../MetricTriggerIntegrationTest.java           |   2 +
 .../ScheduledTriggerIntegrationTest.java        |   2 +
 .../solr/cloud/autoscaling/TestPolicyCloud.java |   6 +-
 .../sim/SimClusterStateProvider.java            |  41 +-
 .../sim/TestGenericDistributedQueue.java        |   1 +
 .../cloud/autoscaling/sim/TestLargeCluster.java |   2 +-
 .../cloud/autoscaling/sim/TestPolicyCloud.java  |   4 +-
 .../solr/cloud/cdcr/CdcrBootstrapTest.java      |   2 +
 .../hdfs/HdfsChaosMonkeySafeLeaderTest.java     |   2 +
 .../org/apache/solr/handler/TestSQLHandler.java |   2 +
 .../handler/admin/AdminHandlersProxyTest.java   | 119 +++
 .../solr/handler/admin/MetricsHandlerTest.java  |   8 +-
 .../admin/MetricsHistoryHandlerTest.java        |   4 +-
 .../component/DistributedMLTComponentTest.java  |   2 +
 .../handler/component/InfixSuggestersTest.java  |  33 +-
 .../reporters/SolrJmxReporterCloudTest.java     |   2 +
 .../reporters/SolrSlf4jReporterTest.java        |   5 +-
 .../reporters/solr/SolrCloudReportersTest.java  |   3 +
 .../solr/search/TestHashQParserPlugin.java      |  30 +-
 .../solr/search/stats/TestDistribIDF.java       |   2 +
 .../hadoop/TestDelegationWithHadoopAuth.java    |   2 +
 .../solr/servlet/HttpSolrCallGetCoreTest.java   |   2 +
 .../solr/update/MaxSizeAutoCommitTest.java      |   7 +-
 .../solr/update/SolrIndexSplitterTest.java      | 122 ++-
 .../solr/update/TestInPlaceUpdatesDistrib.java  |   2 +
 solr/licenses/commons-beanutils-1.8.3.jar.sha1  |   1 -
 solr/licenses/commons-beanutils-LICENSE-ASL.txt | 202 -----
 solr/licenses/commons-beanutils-NOTICE.txt      |   2 -
 solr/solr-ref-guide/src/cloud-screens.adoc      |  25 +-
 solr/solr-ref-guide/src/collections-api.adoc    |  28 +-
 .../solr-ref-guide/src/filter-descriptions.adoc |  50 +-
 .../src/images/cloud-screens/cloud-nodes.png    | Bin 0 -> 185805 bytes
 solr/solr-ref-guide/src/metrics-reporting.adoc  |  83 ++-
 .../src/running-solr-on-hdfs.adoc               |   2 +-
 ...olrcloud-autoscaling-policy-preferences.adoc |   7 +-
 .../client/solrj/cloud/autoscaling/Cell.java    |   5 +-
 .../client/solrj/cloud/autoscaling/Clause.java  | 235 +-----
 .../solrj/cloud/autoscaling/ComputedType.java   |  99 +++
 .../solrj/cloud/autoscaling/Condition.java      | 122 +++
 .../solrj/cloud/autoscaling/CoresVariable.java  | 115 +++
 .../cloud/autoscaling/FreeDiskVariable.java     | 135 ++++
 .../solrj/cloud/autoscaling/NodeVariable.java   |  45 ++
 .../client/solrj/cloud/autoscaling/Operand.java |  16 +-
 .../client/solrj/cloud/autoscaling/Policy.java  |  31 +-
 .../solrj/cloud/autoscaling/PolicyHelper.java   |  11 +-
 .../solrj/cloud/autoscaling/RangeVal.java       |  54 ++
 .../cloud/autoscaling/ReplicaVariable.java      | 135 ++++
 .../client/solrj/cloud/autoscaling/Row.java     |   4 +-
 .../solrj/cloud/autoscaling/SealedClause.java   |   2 +-
 .../solrj/cloud/autoscaling/Suggester.java      |   2 +-
 .../solrj/cloud/autoscaling/Suggestion.java     | 740 +------------------
 .../client/solrj/cloud/autoscaling/VarType.java |  43 --
 .../solrj/cloud/autoscaling/Variable.java       | 366 +++++++++
 .../solrj/cloud/autoscaling/VariableBase.java   | 205 +++++
 .../solrj/cloud/autoscaling/Violation.java      |  49 ++
 .../autoscaling/WithCollectionVarType.java      | 160 ----
 .../autoscaling/WithCollectionVariable.java     | 165 +++++
 .../solrj/impl/SolrClientNodeStateProvider.java |  13 +-
 .../solrj/io/stream/SignificantTermsStream.java |  16 +-
 .../solrj/request/CollectionAdminRequest.java   |  11 +
 .../solr/common/params/CommonAdminParams.java   |   2 +
 .../apache/solr/common/params/CommonParams.java |   2 +
 .../solr/common/params/CoreAdminParams.java     |   2 +-
 .../resources/apispec/collections.Commands.json |   3 +-
 .../collections.collection.Commands.modify.json |   4 +-
 .../solrj/cloud/autoscaling/TestPolicy.java     | 125 +++-
 .../solrj/cloud/autoscaling/TestPolicy2.java    |  76 +-
 .../solrj/embedded/LargeVolumeJettyTest.java    |   2 +
 .../client/solrj/impl/CloudSolrClientTest.java  |   5 +-
 .../solrj/io/graph/GraphExpressionTest.java     |   1 +
 .../solr/client/solrj/io/graph/GraphTest.java   |   1 +
 .../solrj/io/stream/MathExpressionTest.java     |   4 +-
 .../solrj/io/stream/StreamDecoratorTest.java    |  11 +
 .../solrj/io/stream/StreamExpressionTest.java   |   1 +
 solr/webapp/web/css/angular/cloud.css           | 193 +++++
 solr/webapp/web/css/angular/menu.css            |   2 +-
 solr/webapp/web/index.html                      |   4 +-
 solr/webapp/web/js/angular/controllers/cloud.js | 486 +++++++++++-
 solr/webapp/web/js/angular/services.js          |   7 +-
 solr/webapp/web/partials/cloud.html             | 132 ++++
 178 files changed, 5936 insertions(+), 2271 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6d6fbdb/lucene/ivy-versions.properties
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6d6fbdb/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------


[26/48] lucene-solr:jira/http2: LUCENE-8441: IndexWriter now checks doc value type of index sort fields and fails the document if they are not compatible.

Posted by da...@apache.org.
LUCENE-8441: IndexWriter now checks doc value type of index sort fields and fails the document if they are not compatible.
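
As the new TestIndexSorting case below exercises, the check fires on the first document that indexes the sort field with an incompatible doc values type. A minimal sketch of the failure mode, assuming a NUMERIC index sort (class name and directory path are illustrative):

  import java.nio.file.Paths;
  import org.apache.lucene.analysis.standard.StandardAnalyzer;
  import org.apache.lucene.document.Document;
  import org.apache.lucene.document.SortedDocValuesField;
  import org.apache.lucene.index.IndexWriter;
  import org.apache.lucene.index.IndexWriterConfig;
  import org.apache.lucene.search.Sort;
  import org.apache.lucene.search.SortField;
  import org.apache.lucene.store.Directory;
  import org.apache.lucene.store.FSDirectory;
  import org.apache.lucene.util.BytesRef;

  public class IndexSortTypeCheckExample {
    public static void main(String[] args) throws Exception {
      Directory dir = FSDirectory.open(Paths.get("/tmp/sorted-index"));
      IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
      iwc.setIndexSort(new Sort(new SortField("field", SortField.Type.INT))); // numeric index sort
      try (IndexWriter w = new IndexWriter(dir, iwc)) {
        Document doc = new Document();
        // SORTED doc values on the sort field, but the index sort expects NUMERIC:
        doc.add(new SortedDocValuesField("field", new BytesRef("abc")));
        // fails fast with IllegalArgumentException containing "invalid doc value type"
        w.addDocument(doc);
      }
    }
  }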


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/679b4aa7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/679b4aa7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/679b4aa7

Branch: refs/heads/jira/http2
Commit: 679b4aa71d205ac58621f6b2bad64637f6bd7d67
Parents: 1133bf9
Author: Jim Ferenczi <ji...@apache.org>
Authored: Wed Aug 1 18:28:51 2018 +0200
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Wed Aug 1 18:28:51 2018 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  3 ++
 .../lucene/index/DefaultIndexingChain.java      | 49 ++++++++++++++++++++
 .../index/SortingStoredFieldsConsumer.java      |  6 ++-
 .../index/SortingTermVectorsConsumer.java       |  6 ++-
 .../apache/lucene/index/TestIndexSorting.java   | 40 ++++++++++++++++
 5 files changed, 100 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/679b4aa7/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index fdc8c98..b76cc6f 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -199,6 +199,9 @@ Bug Fixes:
 * LUCENE-8429: DaciukMihovAutomatonBuilder is no longer prone to stack
   overflows by enforcing a maximum term length. (Adrien Grand)
 
+* LUCENE-8441: IndexWriter now checks doc value type for index sort fields
+  and fails the document if they are not compatible. (Jim Ferenczi, Mike McCandless)
+
 Changes in Runtime Behavior:
 
 * LUCENE-7976: TieredMergePolicy now respects maxSegmentSizeMB by default when executing

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/679b4aa7/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
index e552516..d0a6974 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
@@ -39,6 +39,8 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.ArrayUtil;
@@ -524,6 +526,48 @@ final class DefaultIndexingChain extends DocConsumer {
     fp.pointValuesWriter.addPackedValue(docState.docID, field.binaryValue());
   }
 
+  private void validateIndexSortDVType(Sort indexSort, String fieldName, DocValuesType dvType) {
+    for (SortField sortField : indexSort.getSort()) {
+      if (sortField.getField().equals(fieldName)) {
+        switch (dvType) {
+          case NUMERIC:
+            if (sortField.getType().equals(SortField.Type.INT) == false &&
+                  sortField.getType().equals(SortField.Type.LONG) == false &&
+                  sortField.getType().equals(SortField.Type.FLOAT) == false &&
+                  sortField.getType().equals(SortField.Type.DOUBLE) == false) {
+              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+            }
+            break;
+
+          case BINARY:
+            throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+
+          case SORTED:
+            if (sortField.getType().equals(SortField.Type.STRING) == false) {
+              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+            }
+            break;
+
+          case SORTED_NUMERIC:
+            if (sortField instanceof SortedNumericSortField == false) {
+              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+            }
+            break;
+
+          case SORTED_SET:
+            if (sortField instanceof SortedSetSortField == false) {
+              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+            }
+            break;
+
+          default:
+            throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+        }
+        break;
+      }
+    }
+  }
+
   /** Called from processDocument to index one field's doc value */
   private void indexDocValue(PerField fp, DocValuesType dvType, IndexableField field) throws IOException {
 
@@ -531,7 +575,12 @@ final class DefaultIndexingChain extends DocConsumer {
       // This is the first time we are seeing this field indexed with doc values, so we
       // now record the DV type so that any future attempt to (illegally) change
       // the DV type of this field, will throw an IllegalArgExc:
+      if (docWriter.getSegmentInfo().getIndexSort() != null) {
+        final Sort indexSort = docWriter.getSegmentInfo().getIndexSort();
+        validateIndexSortDVType(indexSort, fp.fieldInfo.name, dvType);
+      }
       fieldInfos.globalFieldNumbers.setDocValuesType(fp.fieldInfo.number, fp.fieldInfo.name, dvType);
+
     }
     fp.fieldInfo.setDocValuesType(dvType);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/679b4aa7/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java b/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
index e5443b2..97253a5 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
@@ -83,8 +83,10 @@ final class SortingStoredFieldsConsumer extends StoredFieldsConsumer {
     try {
       super.abort();
     } finally {
-      IOUtils.deleteFilesIgnoringExceptions(tmpDirectory,
-          tmpDirectory.getTemporaryFiles().values());
+      if (tmpDirectory != null) {
+        IOUtils.deleteFilesIgnoringExceptions(tmpDirectory,
+            tmpDirectory.getTemporaryFiles().values());
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/679b4aa7/lucene/core/src/java/org/apache/lucene/index/SortingTermVectorsConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortingTermVectorsConsumer.java b/lucene/core/src/java/org/apache/lucene/index/SortingTermVectorsConsumer.java
index 054ca50..955dd8a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortingTermVectorsConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortingTermVectorsConsumer.java
@@ -83,8 +83,10 @@ final class SortingTermVectorsConsumer extends TermVectorsConsumer {
     try {
       super.abort();
     } finally {
-      IOUtils.deleteFilesIgnoringExceptions(tmpDirectory,
-          tmpDirectory.getTemporaryFiles().values());
+      if (tmpDirectory != null) {
+        IOUtils.deleteFilesIgnoringExceptions(tmpDirectory,
+            tmpDirectory.getTemporaryFiles().values());
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/679b4aa7/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index 9711a35..674972f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -84,6 +84,7 @@ import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.TestUtil;
 
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
+import static org.junit.internal.matchers.StringContains.containsString;
 
 public class TestIndexSorting extends LuceneTestCase {
   static class AssertingNeedsIndexSortCodec extends FilterCodec {
@@ -2472,4 +2473,43 @@ public class TestIndexSorting extends LuceneTestCase {
     IOUtils.close(r, w, dir);
   }
 
+  public void testWrongSortFieldType() throws Exception {
+    Directory dir = newDirectory();
+    List<Field> dvs = new ArrayList<>();
+    dvs.add(new SortedDocValuesField("field", new BytesRef("")));
+    dvs.add(new SortedSetDocValuesField("field", new BytesRef("")));
+    dvs.add(new NumericDocValuesField("field", 42));
+    dvs.add(new SortedNumericDocValuesField("field", 42));
+
+    List<SortField> sortFields = new ArrayList<>();
+    sortFields.add(new SortField("field", SortField.Type.STRING));
+    sortFields.add(new SortedSetSortField("field", false));
+    sortFields.add(new SortField("field", SortField.Type.INT));
+    sortFields.add(new SortedNumericSortField("field", SortField.Type.INT));
+
+    for (int i = 0; i < sortFields.size(); i++) {
+      for (int j = 0; j < dvs.size(); j++) {
+        if (i == j) {
+          continue;
+        }
+        Sort indexSort = new Sort(sortFields.get(i));
+        IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+        iwc.setIndexSort(indexSort);
+        IndexWriter w = new IndexWriter(dir, iwc);
+        Document doc = new Document();
+        doc.add(dvs.get(j));
+        IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> w.addDocument(doc));
+        assertThat(exc.getMessage(), containsString("invalid doc value type"));
+        doc.clear();
+        doc.add(dvs.get(i));
+        w.addDocument(doc);
+        doc.add(dvs.get(j));
+        exc = expectThrows(IllegalArgumentException.class, () -> w.addDocument(doc));
+        assertThat(exc.getMessage(), containsString("cannot change DocValues type"));
+        w.rollback();
+        IOUtils.close(w);
+      }
+    }
+    IOUtils.close(dir);
+  }
 }


[20/48] lucene-solr:jira/http2: Fix AIOOBE in GeoTestUtil.

Posted by da...@apache.org.
Fix AIOOBE (ArrayIndexOutOfBoundsException) in GeoTestUtil.
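
The exception was a classic off-by-one: each call site below reads both polyLats[vertex] and polyLats[vertex+1], so the random bound has to leave room for the +1 access (randomInt's upper bound is inclusive here, which is what the fix implies):

  // before: vertex could be polyLats.length - 1, so polyLats[vertex + 1] overflowed
  int vertex = randomInt(polyLats.length - 2);            // now vertex + 1 <= length - 1
  double dLat = polyLats[vertex + 1] - polyLats[vertex];  // safe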


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c3e81318
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c3e81318
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c3e81318

Branch: refs/heads/jira/http2
Commit: c3e813188eaf103ac8b6460cda3ce231db08b623
Parents: 1163091
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 15:17:53 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 15:17:53 2018 +0200

----------------------------------------------------------------------
 .../src/java/org/apache/lucene/geo/GeoTestUtil.java            | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c3e81318/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
index 8817d20..bb29b0b 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
@@ -245,7 +245,7 @@ public class GeoTestUtil {
       return new double[] { nextLatitudeBetween(polygon.minLat, polygon.maxLat), nextLongitudeBetween(polygon.minLon, polygon.maxLon) };
     } else if (surpriseMe < 20) {
       // target a vertex
-      int vertex = randomInt(polyLats.length - 1);
+      int vertex = randomInt(polyLats.length - 2);
       return new double[] { nextLatitudeNear(polyLats[vertex], polyLats[vertex+1] - polyLats[vertex]), 
                             nextLongitudeNear(polyLons[vertex], polyLons[vertex+1] - polyLons[vertex]) };
     } else if (surpriseMe < 30) {
@@ -253,12 +253,12 @@ public class GeoTestUtil {
       Polygon container = boxPolygon(new Rectangle(polygon.minLat, polygon.maxLat, polygon.minLon, polygon.maxLon));
       double containerLats[] = container.getPolyLats();
       double containerLons[] = container.getPolyLons();
-      int startVertex = randomInt(containerLats.length - 1);
+      int startVertex = randomInt(containerLats.length - 2);
       return nextPointAroundLine(containerLats[startVertex], containerLons[startVertex], 
                                  containerLats[startVertex+1], containerLons[startVertex+1]);
     } else {
       // target points around diagonals between vertices
-      int startVertex = randomInt(polyLats.length - 1);
+      int startVertex = randomInt(polyLats.length - 2);
       // but favor edges heavily
       int endVertex = randomBoolean() ? startVertex + 1 : randomInt(polyLats.length - 1);
       return nextPointAroundLine(polyLats[startVertex], polyLons[startVertex], 


[38/48] lucene-solr:jira/http2: LUCENE-8443: Fix InverseIntersectVisitor logic for LatLonShape queries, add adversarial test for same shape many times

Posted by da...@apache.org.
LUCENE-8443: Fix InverseIntersectVisitor logic for LatLonShape queries, add adversarial test for same shape many times
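
The inverse visitor runs the BKD traversal backwards: when most documents are expected to match, the scorer starts from a bitset with every candidate bit set and clears the non-matches, which requires swapping CELL_INSIDE_QUERY and CELL_OUTSIDE_QUERY when relating tree cells. The bug was twofold: visit(docID, packedTriangle) cleared documents whose triangle DID cross the query, and crossing cells were mapped to CELL_OUTSIDE_QUERY and skipped. Condensed from the patch below, with explanatory comments:

  @Override
  public void visit(int docID, byte[] packedTriangle) {
    // clear only documents whose triangle does NOT cross the query
    if (queryCrossesTriangle(packedTriangle) == false) {
      result.clear(docID);
      cost[0]--;
    }
  }

  @Override
  public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
    Relation r = relateRangeToQuery(minPackedValue, maxPackedValue);
    if (r == Relation.CELL_OUTSIDE_QUERY) {
      return Relation.CELL_INSIDE_QUERY;   // fully outside the query: clear every doc below
    } else if (r == Relation.CELL_INSIDE_QUERY) {
      return Relation.CELL_OUTSIDE_QUERY;  // fully inside: nothing to clear, skip the subtree
    }
    return r;                              // crossing cells must still be visited
  }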


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2a41cbd1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2a41cbd1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2a41cbd1

Branch: refs/heads/jira/http2
Commit: 2a41cbd1924510000f6e69ae2e6cccb7b2e26af2
Parents: 17a02c1
Author: Nicholas Knize <nk...@gmail.com>
Authored: Fri Aug 3 10:30:47 2018 -0500
Committer: Nicholas Knize <nk...@gmail.com>
Committed: Fri Aug 3 10:30:47 2018 -0500

----------------------------------------------------------------------
 .../document/LatLonShapeBoundingBoxQuery.java   |   4 +-
 .../document/LatLonShapePolygonQuery.java       | 108 +++++++++++++++----
 .../document/BaseLatLonShapeTestCase.java       |  15 +++
 .../document/TestLatLonPointShapeQueries.java   |   1 -
 4 files changed, 106 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2a41cbd1/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
index 7bb78b8..9779210 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
@@ -273,7 +273,7 @@ class LatLonShapeBoundingBoxQuery extends Query {
 
           @Override
           public void visit(int docID, byte[] packedTriangle) {
-            if (queryCrossesTriangle(packedTriangle)) {
+            if (queryCrossesTriangle(packedTriangle) == false) {
               result.clear(docID);
               cost[0]--;
             }
@@ -284,7 +284,7 @@ class LatLonShapeBoundingBoxQuery extends Query {
             Relation r = relateRangeToQuery(minPackedValue, maxPackedValue);
             if (r == Relation.CELL_OUTSIDE_QUERY) {
               return Relation.CELL_INSIDE_QUERY;
-            } else if (r == Relation.CELL_INSIDE_QUERY || r == Relation.CELL_CROSSES_QUERY) {
+            } else if (r == Relation.CELL_INSIDE_QUERY) {
               return Relation.CELL_OUTSIDE_QUERY;
             }
             return r;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2a41cbd1/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
index 9a9b890..be47971 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
@@ -39,7 +39,9 @@ import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.BitSetIterator;
 import org.apache.lucene.util.DocIdSetBuilder;
+import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.FutureArrays;
 import org.apache.lucene.util.NumericUtils;
 
@@ -179,6 +181,39 @@ public class LatLonShapePolygonQuery extends Query {
         };
       }
 
+      /**
+       * Create a visitor that clears documents that do NOT match the polygon query.
+       */
+      private IntersectVisitor getInverseIntersectVisitor(FixedBitSet result, int[] cost) {
+        return new IntersectVisitor() {
+
+          @Override
+          public void visit(int docID) {
+            result.clear(docID);
+            cost[0]--;
+          }
+
+          @Override
+          public void visit(int docID, byte[] packedTriangle) {
+            if (queryCrossesTriangle(packedTriangle) == false) {
+              result.clear(docID);
+              cost[0]--;
+            }
+          }
+
+          @Override
+          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+            Relation r = relateRangeToQuery(minPackedValue, maxPackedValue);
+            if (r == Relation.CELL_OUTSIDE_QUERY) {
+              return Relation.CELL_INSIDE_QUERY;
+            } else if (r == Relation.CELL_INSIDE_QUERY) {
+              return Relation.CELL_OUTSIDE_QUERY;
+            }
+            return r;
+          }
+        };
+      }
+
       @Override
       public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
         LeafReader reader = context.reader();
@@ -193,29 +228,64 @@ public class LatLonShapePolygonQuery extends Query {
           return null;
         }
 
+        boolean allDocsMatch = true;
+        if (values.getDocCount() != reader.maxDoc() ||
+            relateRangeToQuery(values.getMinPackedValue(), values.getMaxPackedValue()) != Relation.CELL_INSIDE_QUERY) {
+          allDocsMatch = false;
+        }
+
         final Weight weight = this;
-        return new ScorerSupplier() {
-          final DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
-          final PointValues.IntersectVisitor visitor = getIntersectVisitor(result);
-          long cost = -1;
+        if (allDocsMatch) {
+          return new ScorerSupplier() {
+            @Override
+            public Scorer get(long leadCost) throws IOException {
+              return new ConstantScoreScorer(weight, score(),
+                  DocIdSetIterator.all(reader.maxDoc()));
+            }
 
-          @Override
-          public Scorer get(long leadCost) throws IOException {
-            values.intersect(visitor);
-            DocIdSetIterator iterator = result.build().iterator();
-            return new ConstantScoreScorer(weight, score(), iterator);
-          }
+            @Override
+            public long cost() {
+              return reader.maxDoc();
+            }
+          };
+        } else {
+          return new ScorerSupplier() {
+            final DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
+            final IntersectVisitor visitor = getIntersectVisitor(result);
+            long cost = -1;
 
-          @Override
-          public long cost() {
-            if (cost == -1) {
-              // Computing the cost may be expensive, so only do it if necessary
-              cost = values.estimatePointCount(visitor);
-              assert cost >= 0;
+            @Override
+            public Scorer get(long leadCost) throws IOException {
+              if (values.getDocCount() == reader.maxDoc()
+                  && values.getDocCount() == values.size()
+                  && cost() > reader.maxDoc() / 2) {
+                // If all docs have exactly one value and the cost is greater
+                // than half the leaf size then maybe we can make things faster
+                // by computing the set of documents that do NOT match the query
+                final FixedBitSet result = new FixedBitSet(reader.maxDoc());
+                result.set(0, reader.maxDoc());
+                int[] cost = new int[]{reader.maxDoc()};
+                values.intersect(getInverseIntersectVisitor(result, cost));
+                final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]);
+                return new ConstantScoreScorer(weight, score(), iterator);
+              }
+
+              values.intersect(visitor);
+              DocIdSetIterator iterator = result.build().iterator();
+              return new ConstantScoreScorer(weight, score(), iterator);
             }
-            return cost;
-          }
-        };
+
+            @Override
+            public long cost() {
+              if (cost == -1) {
+                // Computing the cost may be expensive, so only do it if necessary
+                cost = values.estimatePointCount(visitor);
+                assert cost >= 0;
+              }
+              return cost;
+            }
+          };
+        }
       }
 
       @Override

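For readers skimming the patch, a minimal standalone sketch of the complement-set strategy follows. This is an illustration only: matches(docID) is a hypothetical per-document predicate standing in for the BKD-tree-driven inverse visitor above, and in the real patch values.intersect(...) drives the elimination so whole cells inside the query are skipped in one step rather than doc by doc.

    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.util.BitSetIterator;
    import org.apache.lucene.util.FixedBitSet;

    // Dense case: every document has exactly one value and more than half of
    // them are expected to match, so build the result set by elimination.
    FixedBitSet bits = new FixedBitSet(maxDoc);
    bits.set(0, maxDoc);                 // start with every doc marked as a match
    int remaining = maxDoc;              // running cardinality, used as the cost hint
    for (int docID = 0; docID < maxDoc; docID++) {
      if (matches(docID) == false) {     // hypothetical predicate
        bits.clear(docID);
        remaining--;
      }
    }
    DocIdSetIterator iterator = new BitSetIterator(bits, remaining);
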
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2a41cbd1/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
index 3321f9a..a7560ee 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.document;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -110,6 +111,19 @@ public abstract class BaseLatLonShapeTestCase extends LuceneTestCase {
     return LatLonShape.newPolygonQuery(field, polygons);
   }
 
+  // A particularly tricky adversary for BKD tree:
+  public void testSameShapeManyTimes() throws Exception {
+    int numShapes = atLeast(1000);
+
+    // Every doc indexes the same shape:
+    Object theShape = nextShape();
+
+    Object[] shapes = new Object[numShapes];
+    Arrays.fill(shapes, theShape);
+
+    verify(shapes);
+  }
+
   public void testRandomTiny() throws Exception {
     // Make sure single-leaf-node case is OK:
     doTestRandom(10);
@@ -398,6 +412,7 @@ public abstract class BaseLatLonShapeTestCase extends LuceneTestCase {
       sb.append(lon);
       sb.append(',');
       sb.append(lat);
+      sb.append(')');
       return sb.toString();
     }
   }

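Note: indexing the same shape for every document is a classic adversary for the BKD tree, since every leaf ends up holding identical values and the degenerate split paths get exercised; it also produces exactly the dense-match result sets that the inverse-visitor branch above is meant to serve.
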
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2a41cbd1/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
index 62e4cdf..3adb26b 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
@@ -64,7 +64,6 @@ public class TestLatLonPointShapeQueries extends BaseLatLonShapeTestCase {
     }
   }
 
-  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-8443")
   @Override
   public void testRandomTiny() throws Exception {
   }


[29/48] lucene-solr:jira/http2: SOLR-12607: Fixed two separate bugs in shard splits which can cause data loss. The first case is when using TLOG replicas only, the updates forwarded from parent shard leader to the sub-shard leader are written only in tlo

Posted by da...@apache.org.
SOLR-12607: Fixed two separate bugs in shard splits which can cause data loss. The first case is when using TLOG replicas only: the updates forwarded from the parent shard leader to the sub-shard leader are written only to the tlog and not the index. If this happens after the buffered updates have been replayed then the updates can never be executed even though they remain in the transaction log. The second case is when synchronously forwarding updates to the sub-shard leader fails and the underlying errors are not propagated to the client.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/259bc2ba
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/259bc2ba
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/259bc2ba

Branch: refs/heads/jira/http2
Commit: 259bc2baf7ce58aa0143fa6a8d43da417506cd63
Parents: 64573c1
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Thu Aug 2 08:24:32 2018 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Thu Aug 2 08:24:32 2018 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  8 +-
 .../cloud/api/collections/AddReplicaCmd.java    |  2 +-
 .../org/apache/solr/core/CoreContainer.java     |  2 +-
 .../processor/DistributedUpdateProcessor.java   | 12 +--
 .../solr/cloud/ChaosMonkeyShardSplitTest.java   |  9 +-
 .../cloud/api/collections/ShardSplitTest.java   | 88 +++++++-------------
 6 files changed, 51 insertions(+), 70 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/259bc2ba/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 49fc7fe..6864ce7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -186,7 +186,13 @@ Bug Fixes
 * SOLR-12477: An update would return a client error(400) if it hit a AlreadyClosedException.
   We now return the error as a server error(500) instead (Jeffery via Varun Thacker)
   
-* SOLR-12606: Fix InfixSuggestersTest.testShutdownDuringBuild() failures. (Steve Rowe) 
+* SOLR-12606: Fix InfixSuggestersTest.testShutdownDuringBuild() failures. (Steve Rowe)
+
+* SOLR-12607: Fixed two separate bugs in shard splits which can cause data loss. The first case is when using TLOG
+  replicas only: the updates forwarded from the parent shard leader to the sub-shard leader are written only to the
+  tlog and not the index. If this happens after the buffered updates have been replayed then the updates can never be
+  executed even though they remain in the transaction log. The second case is when synchronously forwarding updates
+  to the sub-shard leader fails and the underlying errors are not propagated to the client. (Cao Manh Dat, shalin)
 
 Optimizations
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/259bc2ba/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
index 0feeec9..c9dbaec 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
@@ -278,7 +278,7 @@ public class AddReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
             cloudManager).get(0).nodeName;// TODO: use replica type in this logic too
       }
     }
-    log.info("Node Identified {} for creating new replica", node);
+    log.info("Node Identified {} for creating new replica of shard {}", node, shard);
 
     if (!clusterState.liveNodesContain(node)) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Node: " + node + " is not live");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/259bc2ba/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 8659e04..6af7c97 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -1776,7 +1776,7 @@ public class CoreContainer {
    * @return true if we were able to successfuly perisist the repaired coreDescriptor, false otherwise.
    *
    * See SOLR-11503, This can be removed when there's no chance we'll need to upgrade a
-   * Solr isntallation createged with legacyCloud=true from 6.6.1 through 7.1
+   * Solr installation created with legacyCloud=true from 6.6.1 through 7.1
    */
   public boolean repairCoreProperty(CoreDescriptor cd, String prop) {
     // So far, coreNodeName is the only property that we need to repair, this may get more complex as other properties

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/259bc2ba/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 8d715a6..a21d906 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -432,13 +432,13 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
         boolean isSubset = aslice.getRange() != null && aslice.getRange().isSubsetOf(myRange);
         if (isSubset &&
             (docId == null // in case of deletes
-            || (docId != null && coll.getRouter().isTargetSlice(docId, doc, req.getParams(), aslice.getName(), coll)))) {
+            || coll.getRouter().isTargetSlice(docId, doc, req.getParams(), aslice.getName(), coll))) {
           Replica sliceLeader = aslice.getLeader();
           // slice leader can be null because node/shard is created zk before leader election
           if (sliceLeader != null && zkController.getClusterState().liveNodesContain(sliceLeader.getNodeName()))  {
             if (nodes == null) nodes = new ArrayList<>();
             ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(sliceLeader);
-            nodes.add(new StdNode(nodeProps, coll.getName(), shardId));
+            nodes.add(new StdNode(nodeProps, coll.getName(), aslice.getName()));
           }
         }
       }
@@ -896,7 +896,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
           // not the leader anymore maybe or the error'd node is not my replica?
           if (!foundErrorNodeInReplicaList) {
             log.warn("Core "+cloudDesc.getCoreNodeName()+" belonging to "+collection+" "+
-                shardId+", does not have error'd node " + stdNode.getNodeProps().getCoreUrl() + " as a replica. " +
+                cloudDesc.getShardId()+", does not have error'd node " + stdNode.getNodeProps().getCoreUrl() + " as a replica. " +
                 "No request recovery command will be sent!");
             if (!shardId.equals(cloudDesc.getShardId())) {
               // some replicas on other shard did not receive the updates (ex: during splitshard),
@@ -1150,7 +1150,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
                 }
               }
             }
-            if (replicaType == Replica.Type.TLOG && (cmd.getFlags() & UpdateCommand.REPLAY) == 0) {
+            if (!isSubShardLeader && replicaType == Replica.Type.TLOG && (cmd.getFlags() & UpdateCommand.REPLAY) == 0) {
               cmd.setFlags(cmd.getFlags() | UpdateCommand.IGNORE_INDEXWRITER);
             }
           }
@@ -1692,7 +1692,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
             return;
           }
 
-          if (replicaType == Replica.Type.TLOG && (cmd.getFlags() & UpdateCommand.REPLAY) == 0) {
+          if (!isSubShardLeader && replicaType == Replica.Type.TLOG && (cmd.getFlags() & UpdateCommand.REPLAY) == 0) {
             // TLOG replica not leader, don't write the DBQ to IW
             cmd.setFlags(cmd.getFlags() | UpdateCommand.IGNORE_INDEXWRITER);
           }
@@ -1851,7 +1851,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
               }
             }
 
-            if (replicaType == Replica.Type.TLOG && (cmd.getFlags() & UpdateCommand.REPLAY) == 0) {
+            if (!isSubShardLeader && replicaType == Replica.Type.TLOG && (cmd.getFlags() & UpdateCommand.REPLAY) == 0) {
               cmd.setFlags(cmd.getFlags() | UpdateCommand.IGNORE_INDEXWRITER);
             }
           }

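The heart of the TLOG fix is the !isSubShardLeader guard added at all three call sites above. Paraphrased as a sketch (every name below appears in the patch; nothing new is introduced):

    // A non-leader TLOG replica normally skips the IndexWriter and relies on
    // tlog replay/replication. A sub-shard leader receiving updates forwarded
    // during a split must NOT take that shortcut, or the updates land only in
    // its tlog and can never be applied once buffered-update replay is done.
    boolean tlogNonReplay = replicaType == Replica.Type.TLOG
        && (cmd.getFlags() & UpdateCommand.REPLAY) == 0;
    if (!isSubShardLeader && tlogNonReplay) {
      cmd.setFlags(cmd.getFlags() | UpdateCommand.IGNORE_INDEXWRITER);
    }
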
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/259bc2ba/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
index dd5f9f3..0af70c1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
@@ -20,6 +20,8 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.util.LuceneTestCase.Slow;
@@ -70,6 +72,7 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest {
     final List<DocRouter.Range> ranges = router.partitionRange(2, shard1Range);
     final int[] docCounts = new int[ranges.size()];
     int numReplicas = shard1.getReplicas().size();
+    final Set<String> documentIds = ConcurrentHashMap.newKeySet(1024);
 
     Thread indexThread = null;
     OverseerRestarter killer = null;
@@ -79,7 +82,7 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest {
     try {
       del("*:*");
       for (int id = 0; id < 100; id++) {
-        indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id);
+        indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id, documentIds);
       }
       commit();
 
@@ -89,7 +92,7 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest {
           int max = atLeast(401);
           for (int id = 101; id < max; id++) {
             try {
-              indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id);
+              indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id, documentIds);
               Thread.sleep(atLeast(25));
             } catch (Exception e) {
               log.error("Exception while adding doc", e);
@@ -153,7 +156,7 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest {
       }
     }
 
-    checkDocCountsAndShardStates(docCounts, numReplicas);
+    checkDocCountsAndShardStates(docCounts, numReplicas, documentIds);
 
     // todo - can't call waitForThingsToLevelOut because it looks for
     // jettys of all shards

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/259bc2ba/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
index f6ee7b4..a3fbd19 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -39,10 +40,8 @@ import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-import org.apache.solr.client.solrj.response.CoreAdminResponse;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.cloud.AbstractDistribZkTestBase;
@@ -75,8 +74,8 @@ import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
 
-@LogLevel("org.apache.solr.cloud.api.collections=DEBUG")
 @Slow
+@LogLevel("org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud.OverseerTaskProcessor=DEBUG;org.apache.solr.util.TestInjection=DEBUG")
 public class ShardSplitTest extends BasicDistributedZkTest {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -424,10 +423,13 @@ public class ShardSplitTest extends BasicDistributedZkTest {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
   public void testSplitWithChaosMonkey() throws Exception {
     waitForThingsToLevelOut(15);
 
+    log.info("Using legacyCloud=false for cluster");
+    CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false")
+        .process(cloudClient);
+
     List<StoppableIndexingThread> indexers = new ArrayList<>();
     try {
       for (int i = 0; i < 1; i++) {
@@ -502,7 +504,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
       // indexed are available in SolrCloud and if the split succeeded then all replicas of the sub-shard
       // must be consistent (i.e. have same numdocs)
 
-      log.info("Shard split request state is COMPLETED");
+      log.info("Shard split request state is {}", splitStatus == null ? "unknown" : splitStatus.getKey());
       stop.set(true);
       monkeyThread.join();
       Set<String> addFails = new HashSet<>();
@@ -518,37 +520,9 @@ public class ShardSplitTest extends BasicDistributedZkTest {
       cloudClient.getZkStateReader().forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION);
       log.info("Current collection state: {}", printClusterStateInfo(AbstractDistribZkTestBase.DEFAULT_COLLECTION));
 
-      boolean replicaCreationsFailed = false;
-      if (splitStatus == RequestStatusState.FAILED)  {
-        // either one or more replica creation failed (because it may have been created on the same parent shard leader node)
-        // or the split may have failed while trying to soft-commit *after* all replicas have been created
-        // the latter counts as a successful switch even if the API doesn't say so
-        // so we must find a way to distinguish between the two
-        // an easy way to do that is to look at the sub-shard replicas and check if the replica core actually exists
-        // instead of existing solely inside the cluster state
-        DocCollection collectionState = cloudClient.getZkStateReader().getClusterState().getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION);
-        Slice slice10 = collectionState.getSlice(SHARD1_0);
-        Slice slice11 = collectionState.getSlice(SHARD1_1);
-        if (slice10 != null && slice11 != null) {
-          for (Replica replica : slice10) {
-            if (!doesReplicaCoreExist(replica)) {
-              replicaCreationsFailed = true;
-              break;
-            }
-          }
-          for (Replica replica : slice11) {
-            if (!doesReplicaCoreExist(replica)) {
-              replicaCreationsFailed = true;
-              break;
-            }
-          }
-        }
-      }
-
       // true if sub-shard states switch to 'active' eventually
       AtomicBoolean areSubShardsActive = new AtomicBoolean(false);
-
-      if (!replicaCreationsFailed)  {
+      if (splitStatus == RequestStatusState.COMPLETED) {
         // all sub-shard replicas were created successfully so all cores must recover eventually
         waitForRecoveriesToFinish(AbstractDistribZkTestBase.DEFAULT_COLLECTION, true);
         // let's wait for the overseer to switch shard states
@@ -611,23 +585,6 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
   }
 
-  private boolean doesReplicaCoreExist(Replica replica) throws IOException {
-    try (HttpSolrClient client = new HttpSolrClient.Builder(replica.getStr(BASE_URL_PROP))
-        .withHttpClient(cloudClient.getLbClient().getHttpClient()).build())  {
-      String coreName = replica.getCoreName();
-      try {
-        CoreAdminResponse status = CoreAdminRequest.getStatus(coreName, client);
-        if (status.getCoreStatus(coreName) == null || status.getCoreStatus(coreName).size() == 0) {
-          return false;
-        }
-      } catch (Exception e) {
-        log.warn("Error gettting core status of replica " + replica + ". Perhaps it does not exist!", e);
-        return false;
-      }
-    }
-    return true;
-  }
-
   @Test
   public void testSplitShardWithRule() throws Exception {
     doSplitShardWithRule(SolrIndexSplitter.SplitMethod.LINK);
@@ -718,6 +675,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     } else  {
       subRanges = router.partitionRange(2, shard1Range);
     }
+    final Set<String> documentIds = ConcurrentHashMap.newKeySet(1024);
     final List<DocRouter.Range> ranges = subRanges;
     final int[] docCounts = new int[ranges.size()];
     int numReplicas = shard1.getReplicas().size();
@@ -728,7 +686,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     del("*:*");
     for (int id = 0; id <= 100; id++) {
       String shardKey = "" + (char)('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution
-      indexAndUpdateCount(router, ranges, docCounts, shardKey + "!" + String.valueOf(id), id);
+      indexAndUpdateCount(router, ranges, docCounts, shardKey + "!" + String.valueOf(id), id, documentIds);
     }
     commit();
 
@@ -742,7 +700,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
         Set<String> deleted = new HashSet<>();
         for (int id = 101; id < max; id++) {
           try {
-            indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id);
+            indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id, documentIds);
             Thread.sleep(sleep);
             if (usually(random))  {
               String delId = String.valueOf(random.nextInt(id - 101 + 1) + 101);
@@ -750,6 +708,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
               try {
                 deleteAndUpdateCount(router, ranges, docCounts, delId);
                 deleted.add(delId);
+                documentIds.remove(String.valueOf(delId));
               } catch (Exception e) {
                 log.error("Exception while deleting docs", e);
               }
@@ -790,7 +749,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
 
     waitForRecoveriesToFinish(true);
-    checkDocCountsAndShardStates(docCounts, numReplicas);
+    checkDocCountsAndShardStates(docCounts, numReplicas, documentIds);
   }
 
 
@@ -957,7 +916,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
   }
 
-  protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas) throws Exception {
+  protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas, Set<String> documentIds) throws Exception {
     ClusterState clusterState = null;
     Slice slice1_0 = null, slice1_1 = null;
     int i = 0;
@@ -1005,7 +964,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
     long shard11Count = response2.getResults().getNumFound();
 
-    logDebugHelp(docCounts, response, shard10Count, response2, shard11Count);
+    logDebugHelp(docCounts, response, shard10Count, response2, shard11Count, documentIds);
 
     assertEquals("Wrong doc count on shard1_0. See SOLR-5309", docCounts[0], shard10Count);
     assertEquals("Wrong doc count on shard1_1. See SOLR-5309", docCounts[1], shard11Count);
@@ -1068,12 +1027,13 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     }
   }
 
-  protected void indexAndUpdateCount(DocRouter router, List<DocRouter.Range> ranges, int[] docCounts, String id, int n) throws Exception {
+  protected void indexAndUpdateCount(DocRouter router, List<DocRouter.Range> ranges, int[] docCounts, String id, int n, Set<String> documentIds) throws Exception {
     index("id", id, "n_ti", n);
 
     int idx = getHashRangeIdx(router, ranges, id);
     if (idx != -1)  {
       docCounts[idx]++;
+      documentIds.add(String.valueOf(id));
     }
   }
 
@@ -1101,12 +1061,14 @@ public class ShardSplitTest extends BasicDistributedZkTest {
     return -1;
   }
 
-  protected void logDebugHelp(int[] docCounts, QueryResponse response, long shard10Count, QueryResponse response2, long shard11Count) {
+  protected void logDebugHelp(int[] docCounts, QueryResponse response, long shard10Count, QueryResponse response2, long shard11Count, Set<String> documentIds) {
     for (int i = 0; i < docCounts.length; i++) {
       int docCount = docCounts[i];
       log.info("Expected docCount for shard1_{} = {}", i, docCount);
     }
 
+    Set<String> found = new HashSet<>(1024);
+
     log.info("Actual docCount for shard1_0 = {}", shard10Count);
     log.info("Actual docCount for shard1_1 = {}", shard11Count);
     Map<String, String> idVsVersion = new HashMap<>();
@@ -1119,6 +1081,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
       if (old != null) {
         log.error("EXTRA: ID: " + document.getFieldValue("id") + " on shard1_0. Old version: " + old.getFieldValue("_version_") + " new version: " + document.getFieldValue("_version_"));
       }
+      found.add(document.getFieldValue("id").toString());
     }
     for (int i = 0; i < response2.getResults().size(); i++) {
       SolrDocument document = response2.getResults().get(i);
@@ -1131,6 +1094,15 @@ public class ShardSplitTest extends BasicDistributedZkTest {
       if (old != null) {
         log.error("EXTRA: ID: " + document.getFieldValue("id") + " on shard1_1. Old version: " + old.getFieldValue("_version_") + " new version: " + document.getFieldValue("_version_"));
       }
+      found.add(document.getFieldValue("id").toString());
+    }
+
+    if (found.size() < documentIds.size())  {
+      documentIds.removeAll(found);
+      log.error("MISSING: ID: " + documentIds);
+    } else if (found.size() > documentIds.size()) {
+      found.removeAll(documentIds);
+      log.error("EXTRA: ID: " + found);
     }
   }
 

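Note: the documentIds set threaded through indexAndUpdateCount(), checkDocCountsAndShardStates() and logDebugHelp() lets the test report exactly which ids went missing or were duplicated across the sub-shards, rather than only comparing aggregate counts, which is what makes data-loss failures like the two fixed here diagnosable from the logs.
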

[44/48] lucene-solr:jira/http2: SOLR-12615: HashQParserPlugin won't throw an NPE for a string hash key and documents with empty values

Posted by da...@apache.org.
SOLR-12615: HashQParserPlugin won't throw an NPE for a string hash key and documents with empty values


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/592899a4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/592899a4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/592899a4

Branch: refs/heads/jira/http2
Commit: 592899a419c2a15e75f73e906fa61b7e922c9830
Parents: b33df4e
Author: Varun Thacker <va...@apache.org>
Authored: Sat Aug 4 14:31:07 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Sat Aug 4 14:32:36 2018 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  5 ++++
 .../apache/solr/search/HashQParserPlugin.java   |  4 +--
 .../solr/search/TestHashQParserPlugin.java      | 30 +++++++++++++++++++-
 3 files changed, 36 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/592899a4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3d9c68c..605e837 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -207,6 +207,11 @@ Bug Fixes
 
 * SOLR-12594: MetricsHistoryHandler.getOverseerLeader fails when hostname contains hyphen. (ab)
 
+* SOLR-12615: HashQParserPlugin will no longer throw an NPE if the hash key field is a string when there are documents
+  with empty values. All documents with empty values (string, numeric) will be processed by worker=0.
+  This fixes the NPE when using the search stream with partitionKeys. (Varun Thacker)
+
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/592899a4/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
index 8832bb7..bd8f425 100644
--- a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
@@ -297,7 +297,7 @@ public class HashQParserPlugin extends QParserPlugin {
       if (doc == values.docID()) {
         ref = values.binaryValue();
       } else {
-        ref = null;
+        ref = new BytesRef(); // EMPTY_BYTES: worker=0 will always process empty values
       }
       this.fieldType.indexedToReadable(ref, charsRefBuilder);
       CharsRef charsRef = charsRefBuilder.get();
@@ -327,7 +327,7 @@ public class HashQParserPlugin extends QParserPlugin {
       if (valuesDocID == doc) {
         l = values.longValue();
       } else {
-        l = 0;
+        l = 0; //worker=0 will always process empty values
       }
       return Longs.hashCode(l);
     }

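As a rough illustration of the routing contract (hedged: this is not the plugin's exact hash function, only the shape of the rule), each document belongs to exactly one worker, and a missing value now hashes like an empty one:

    // Illustrative only. Empty/missing values hash to a constant (0 for the
    // numeric case, the empty BytesRef for the string case), so they always
    // land in bucket 0 and are processed by worker=0.
    int workers = 4;                       // total number of workers
    int worker = 0;                        // this worker's id
    String key = hasValue ? value : "";    // missing field -> empty value
    int bucket = (key.hashCode() & Integer.MAX_VALUE) % workers;
    boolean keep = bucket == worker;       // keep the doc on its worker only
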
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/592899a4/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java
index 3f320ce..6f68906 100644
--- a/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java
@@ -54,6 +54,34 @@ public class TestHashQParserPlugin extends SolrTestCaseJ4 {
     }
   }
 
+  @Test
+  public void testHashPartitionWithEmptyValues() throws Exception {
+
+    assertU(adoc("id", "1", "a_s", "one", "a_i" , "1"));
+    assertU(adoc("id", "2", "a_s", "one", "a_i" , "1"));
+    assertU(adoc("id", "3"));
+    assertU(adoc("id", "4"));
+    assertU(commit());
+
+    //Test with string hash
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.add("q", "*:*");
+    params.add("fq", "{!hash worker=0 workers=1 cost="+getCost(random())+"}");
+    params.add("partitionKeys", "a_s");
+    params.add("wt", "xml");
+    String response = h.query(req(params));
+    h.validateXPath(response, "//*[@numFound='4']");
+
+    //Test with int hash
+    params = new ModifiableSolrParams();
+    params.add("q", "*:*");
+    params.add("fq", "{!hash worker=0 workers=1 cost="+getCost(random())+"}");
+    params.add("partitionKeys", "a_i");
+    params.add("wt", "xml");
+    response = h.query(req(params));
+    h.validateXPath(response, "//*[@numFound='4']");
+  }
+
 
   @Test
   public void testHashPartition() throws Exception {
@@ -62,7 +90,7 @@ public class TestHashQParserPlugin extends SolrTestCaseJ4 {
     Random random = random();
     HashSet<String> set = new HashSet();
 
-    for(int i=0; i<50; i++) {
+    for (int i=0; i<50; i++) {
       int v = random.nextInt(1000000);
       String val = Integer.toString(v);
       if(!set.contains(val)){


[41/48] lucene-solr:jira/http2: SOLR-12614: Make "Nodes" view the default in AdminUI "Cloud" tab

Posted by da...@apache.org.
SOLR-12614: Make "Nodes" view the default in AdminUI "Cloud" tab


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f8db5d0a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f8db5d0a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f8db5d0a

Branch: refs/heads/jira/http2
Commit: f8db5d0afd34ebea4ae414a2eb148f926830be34
Parents: 0b59b0e
Author: Jan Høydahl <ja...@apache.org>
Authored: Fri Aug 3 23:20:48 2018 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Fri Aug 3 23:20:48 2018 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                | 5 +++++
 solr/webapp/web/js/angular/controllers/cloud.js | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f8db5d0a/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b9846bd..314d045 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -44,6 +44,11 @@ Upgrade Notes
   MemoryDocValues). If you used postingsFormat="Memory" or docValuesFormat="Memory"
   switch to "Direct" instead. (Dawid Weiss)
 
+Other Changes
+----------------------
+
+* SOLR-12614: Make "Nodes" view the default in AdminUI "Cloud" tab (janhoy)
+
 ==================  7.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f8db5d0a/solr/webapp/web/js/angular/controllers/cloud.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/controllers/cloud.js b/solr/webapp/web/js/angular/controllers/cloud.js
index 08eea38..a033ce5 100644
--- a/solr/webapp/web/js/angular/controllers/cloud.js
+++ b/solr/webapp/web/js/angular/controllers/cloud.js
@@ -28,7 +28,7 @@ solrAdminApp.controller('CloudController',
             $scope.showDebug = false;
         };
 
-        var view = $location.search().view ? $location.search().view : "graph";
+        var view = $location.search().view ? $location.search().view : "nodes";
         if (view === "tree") {
             $scope.resetMenu("cloud-tree", Constants.IS_ROOT_PAGE);
             treeSubController($scope, Zookeeper);


[17/48] lucene-solr:jira/http2: LUCENE-8437: CheckIndex shouldn't duplicate SegmentInfos' serialization logic.

Posted by da...@apache.org.
LUCENE-8437: CheckIndex shouldn't duplicate SegmentInfos' serialization logic.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5dffff7d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5dffff7d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5dffff7d

Branch: refs/heads/jira/http2
Commit: 5dffff7df7b0d1b122976a10cf05ace13a9ad6e1
Parents: 99dbe93
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 10:11:40 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 10:11:40 2018 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  3 ++
 .../org/apache/lucene/index/CheckIndex.java     | 39 --------------------
 2 files changed, 3 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5dffff7d/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 76815f5..fdc8c98 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -171,6 +171,9 @@ API Changes:
   been changed from a protected method to a java.util.function.Supplier as a
   constructor argument. (Adrien Grand)
 
+* LUCENE-8437: CheckIndex.Status.cantOpenSegments and missingSegmentVersion
+  have been removed as they were not computed correctly. (Adrien Grand)
+
 Bug Fixes:
 
 * LUCENE-8380: UTF8TaxonomyWriterCache inconsistency. (Ruslan Torobaev, Dawid Weiss)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5dffff7d/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index e8abd0d..9d84a0c 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -54,7 +54,6 @@ import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.Bits;
@@ -107,12 +106,6 @@ public final class CheckIndex implements Closeable {
     /** True if we were unable to locate and load the segments_N file. */
     public boolean missingSegments;
 
-    /** True if we were unable to open the segments_N file. */
-    public boolean cantOpenSegments;
-
-    /** True if we were unable to read the version number from segments_N file. */
-    public boolean missingSegmentVersion;
-
     /** Name of latest segments_N file in the index. */
     public String segmentsFileName;
 
@@ -573,38 +566,6 @@ public final class CheckIndex implements Closeable {
 
     final int numSegments = sis.size();
     final String segmentsFileName = sis.getSegmentsFileName();
-    // note: we only read the format byte (required preamble) here!
-    IndexInput input = null;
-    try {
-      input = dir.openInput(segmentsFileName, IOContext.READONCE);
-    } catch (Throwable t) {
-      if (failFast) {
-        throw IOUtils.rethrowAlways(t);
-      }
-      msg(infoStream, "ERROR: could not open segments file in directory");
-      if (infoStream != null) {
-        t.printStackTrace(infoStream);
-      }
-      result.cantOpenSegments = true;
-      return result;
-    }
-    try {
-      /*int format =*/ input.readInt();
-    } catch (Throwable t) {
-      if (failFast) {
-        throw IOUtils.rethrowAlways(t);
-      }
-      msg(infoStream, "ERROR: could not read segment file version in directory");
-      if (infoStream != null) {
-        t.printStackTrace(infoStream);
-      }
-      result.missingSegmentVersion = true;
-      return result;
-    } finally {
-      if (input != null)
-        input.close();
-    }
-
     result.segmentsFileName = segmentsFileName;
     result.numSegments = numSegments;
     result.userData = sis.getUserData();

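The deleted block re-implemented the first steps of the segments_N on-disk format by hand, which is how the two status flags could be computed incorrectly. After this change CheckIndex leans entirely on SegmentInfos, which validates the header itself when a commit is loaded; a minimal sketch of that path, assuming a Directory named dir:

    import org.apache.lucene.index.SegmentInfos;
    import org.apache.lucene.store.Directory;

    // SegmentInfos performs the format and version checks internally, so a
    // caller no longer needs to open segments_N and peek at the preamble.
    SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
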

[06/48] lucene-solr:jira/http2: SOLR-12557: standardise solr/core (private) logger names

Posted by da...@apache.org.
SOLR-12557: standardise solr/core (private) logger names


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6fbaf698
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6fbaf698
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6fbaf698

Branch: refs/heads/jira/http2
Commit: 6fbaf698b9111bcc7eb579d35da46eb98ebe668b
Parents: 9262ed7
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 31 17:00:11 2018 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Jul 31 17:00:11 2018 +0100

----------------------------------------------------------------------
 lucene/tools/src/groovy/check-source-patterns.groovy | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6fbaf698/lucene/tools/src/groovy/check-source-patterns.groovy
----------------------------------------------------------------------
diff --git a/lucene/tools/src/groovy/check-source-patterns.groovy b/lucene/tools/src/groovy/check-source-patterns.groovy
index 63541c2..d7af361 100644
--- a/lucene/tools/src/groovy/check-source-patterns.groovy
+++ b/lucene/tools/src/groovy/check-source-patterns.groovy
@@ -173,6 +173,9 @@ ant.fileScanner{
       if (f.toString().contains('solr/contrib') && !validLoggerNamePattern.matcher(text).find()) {
         reportViolation(f, 'invalid logger name [not log or LOG]');
       }
+      if (f.toString().contains('solr/core') && !validLoggerNamePattern.matcher(text).find()) {
+        reportViolation(f, 'invalid logger name [not log or LOG]');
+      }
     }
     checkLicenseHeaderPrecedes(f, 'package', packagePattern, javaCommentPattern, text, ratDocument);
     if (f.name.contains("Test")) {

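The check enforces the project convention that private loggers are named log (or LOG). A compliant declaration, as already used by the Solr classes touched elsewhere in this batch (ShardSplitTest, for instance), looks like this:

    import java.lang.invoke.MethodHandles;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
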

[03/48] lucene-solr:jira/http2: SOLR-12601: Refactor the autoscaling package to improve readability

Posted by da...@apache.org.
SOLR-12601: Refactor the autoscaling package to improve readability


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4602e4de
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4602e4de
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4602e4de

Branch: refs/heads/jira/http2
Commit: 4602e4de458352f8363c3d4078f1df24d2ac9430
Parents: a08eadb
Author: Noble Paul <no...@apache.org>
Authored: Tue Jul 31 13:12:34 2018 +1000
Committer: Noble Paul <no...@apache.org>
Committed: Tue Jul 31 13:12:34 2018 +1000

----------------------------------------------------------------------
 .../cloud/api/collections/SplitShardCmd.java    |  10 +-
 .../solr/cloud/autoscaling/TestPolicyCloud.java |   6 +-
 .../sim/SimClusterStateProvider.java            |  41 +-
 .../cloud/autoscaling/sim/TestPolicyCloud.java  |   4 +-
 .../client/solrj/cloud/autoscaling/Cell.java    |   5 +-
 .../client/solrj/cloud/autoscaling/Clause.java  |  48 +-
 .../solrj/cloud/autoscaling/CoresVariable.java  |  69 ++
 .../cloud/autoscaling/FreeDiskVariable.java     | 135 ++++
 .../solrj/cloud/autoscaling/NodeVariable.java   |  45 ++
 .../client/solrj/cloud/autoscaling/Operand.java |  10 +-
 .../client/solrj/cloud/autoscaling/Policy.java  |  27 +-
 .../solrj/cloud/autoscaling/PolicyHelper.java   |  11 +-
 .../solrj/cloud/autoscaling/RangeVal.java       |  54 ++
 .../cloud/autoscaling/ReplicaVariable.java      | 131 ++++
 .../client/solrj/cloud/autoscaling/Row.java     |   4 +-
 .../solrj/cloud/autoscaling/Suggester.java      |   2 +-
 .../solrj/cloud/autoscaling/Suggestion.java     | 740 +------------------
 .../client/solrj/cloud/autoscaling/VarType.java |  43 --
 .../solrj/cloud/autoscaling/Variable.java       | 364 +++++++++
 .../solrj/cloud/autoscaling/VariableBase.java   | 205 +++++
 .../solrj/cloud/autoscaling/Violation.java      |  49 ++
 .../autoscaling/WithCollectionVarType.java      | 160 ----
 .../autoscaling/WithCollectionVariable.java     | 165 +++++
 .../solrj/impl/SolrClientNodeStateProvider.java |  13 +-
 .../solrj/cloud/autoscaling/TestPolicy.java     |  17 +-
 .../solrj/cloud/autoscaling/TestPolicy2.java    |   2 +-
 26 files changed, 1300 insertions(+), 1060 deletions(-)
----------------------------------------------------------------------


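The visible API change for callers is the move of the ConditionType enum out of Suggestion and into Variable.Type; most of the hunks below are mechanical renames of this form:

    // before the refactor
    Number size = (Number) info.getVariable(Suggestion.ConditionType.CORE_IDX.metricsAttribute);
    // after the refactor
    Number size = (Number) info.getVariable(Variable.Type.CORE_IDX.metricsAttribute);
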
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
index ccb111a..b5408f8 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
@@ -31,10 +31,10 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.solr.client.solrj.cloud.DistributedQueue;
 import org.apache.solr.client.solrj.cloud.NodeStateProvider;
-import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
+import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.cloud.overseer.OverseerAction;
@@ -475,7 +475,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     Map<String, Object> nodeValues = nodeStateProvider.getNodeValues(parentShardLeader.getNodeName(),
         Collections.singletonList(ImplicitSnitch.DISK));
     Map<String, Map<String, List<ReplicaInfo>>> infos = nodeStateProvider.getReplicaInfo(parentShardLeader.getNodeName(),
-        Collections.singletonList(Suggestion.ConditionType.CORE_IDX.metricsAttribute));
+        Collections.singletonList(Type.CORE_IDX.metricsAttribute));
     if (infos.get(collection) == null || infos.get(collection).get(shard) == null) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "missing replica information for parent shard leader");
     }
@@ -484,11 +484,11 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     Double indexSize = null;
     for (ReplicaInfo info : lst) {
       if (info.getCore().equals(parentShardLeader.getCoreName())) {
-        Number size = (Number)info.getVariable(Suggestion.ConditionType.CORE_IDX.metricsAttribute);
+        Number size = (Number)info.getVariable(Type.CORE_IDX.metricsAttribute);
         if (size == null) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "missing index size information for parent shard leader");
         }
-        indexSize = (Double)Suggestion.ConditionType.CORE_IDX.convertVal(size);
+        indexSize = (Double) Type.CORE_IDX.convertVal(size);
         break;
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
index a25a67a..f1dbad5 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
@@ -38,7 +38,7 @@ import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.Row;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -120,7 +120,7 @@ public class TestPolicyCloud extends SolrCloudTestCase {
     Policy.Session session = config.getPolicy().createSession(cloudManager);
 
     for (Row row : session.getSortedNodes()) {
-      Object val = row.getVal(Suggestion.ConditionType.TOTALDISK.tagName, null);
+      Object val = row.getVal(Type.TOTALDISK.tagName, null);
       log.info("node: {} , totaldisk : {}, freedisk : {}", row.node, val, row.getVal("freedisk",null));
       assertTrue(val != null);
 
@@ -130,7 +130,7 @@ public class TestPolicyCloud extends SolrCloudTestCase {
     for (Row row : session.getSortedNodes()) {
       row.collectionVsShardVsReplicas.forEach((c, shardVsReplicas) -> shardVsReplicas.forEach((s, replicaInfos) -> {
         for (ReplicaInfo replicaInfo : replicaInfos) {
-          if (replicaInfo.getVariables().containsKey(Suggestion.ConditionType.CORE_IDX.tagName)) count.incrementAndGet();
+          if (replicaInfo.getVariables().containsKey(Type.CORE_IDX.tagName)) count.incrementAndGet();
         }
       }));
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index 6024790..abc3ccf 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -40,13 +40,14 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
+import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion;
 import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -473,10 +474,10 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       // mark replica as active
       replicaInfo.getVariables().put(ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());
       // add a property expected in Policy calculations, if missing
-      if (replicaInfo.getVariable(Suggestion.ConditionType.CORE_IDX.metricsAttribute) == null) {
-        replicaInfo.getVariables().put(Suggestion.ConditionType.CORE_IDX.metricsAttribute, SimCloudManager.DEFAULT_IDX_SIZE_BYTES);
-        replicaInfo.getVariables().put(Suggestion.coreidxsize,
-            Suggestion.ConditionType.CORE_IDX.convertVal(SimCloudManager.DEFAULT_IDX_SIZE_BYTES));
+      if (replicaInfo.getVariable(Type.CORE_IDX.metricsAttribute) == null) {
+        replicaInfo.getVariables().put(Type.CORE_IDX.metricsAttribute, SimCloudManager.DEFAULT_IDX_SIZE_BYTES);
+        replicaInfo.getVariables().put(Variable.coreidxsize,
+            Type.CORE_IDX.convertVal(SimCloudManager.DEFAULT_IDX_SIZE_BYTES));
       }
 
       replicas.add(replicaInfo);
@@ -502,7 +503,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       cloudManager.getMetricManager().registry(registry).counter("UPDATE./update.requests");
       cloudManager.getMetricManager().registry(registry).counter("QUERY./select.requests");
       cloudManager.getMetricManager().registerGauge(null, registry,
-          () -> replicaInfo.getVariable(Suggestion.ConditionType.CORE_IDX.metricsAttribute),
+          () -> replicaInfo.getVariable(Type.CORE_IDX.metricsAttribute),
           "", true, "INDEX.sizeInBytes");
       if (runLeaderElection) {
         simRunLeaderElection(Collections.singleton(replicaInfo.getCollection()), true);
@@ -1085,8 +1086,8 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       replicaProps.put("SEARCHER.searcher.numDocs", replicasNumDocs);
       replicaProps.put("SEARCHER.searcher.maxDoc", replicasNumDocs);
       replicaProps.put("SEARCHER.searcher.deletedDocs", 0);
-      replicaProps.put(Suggestion.ConditionType.CORE_IDX.metricsAttribute, replicasIndexSize);
-      replicaProps.put(Suggestion.coreidxsize, Suggestion.ConditionType.CORE_IDX.convertVal(replicasIndexSize));
+      replicaProps.put(Type.CORE_IDX.metricsAttribute, replicasIndexSize);
+      replicaProps.put(Variable.coreidxsize, Type.CORE_IDX.convertVal(replicasIndexSize));
 
       ReplicaInfo ri = new ReplicaInfo("core_node" + Assign.incAndGetId(stateManager, collectionName, 0),
           solrCoreName, collectionName, replicaPosition.shard, replicaPosition.type, subShardNodeName, replicaProps);
@@ -1246,13 +1247,13 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           try {
             simSetShardValue(collection, s.getName(), "SEARCHER.searcher.deletedDocs", 1, true, false);
             simSetShardValue(collection, s.getName(), "SEARCHER.searcher.numDocs", -1, true, false);
-            Number indexSize = (Number)ri.getVariable(Suggestion.ConditionType.CORE_IDX.metricsAttribute);
+            Number indexSize = (Number)ri.getVariable(Type.CORE_IDX.metricsAttribute);
             if (indexSize != null && indexSize.longValue() > SimCloudManager.DEFAULT_IDX_SIZE_BYTES) {
               indexSize = indexSize.longValue() - DEFAULT_DOC_SIZE_BYTES;
-              simSetShardValue(collection, s.getName(), Suggestion.ConditionType.CORE_IDX.metricsAttribute,
+              simSetShardValue(collection, s.getName(), Type.CORE_IDX.metricsAttribute,
                   indexSize.intValue(), false, false);
-              simSetShardValue(collection, s.getName(), Suggestion.coreidxsize,
-                  Suggestion.ConditionType.CORE_IDX.convertVal(indexSize), false, false);
+              simSetShardValue(collection, s.getName(), Variable.coreidxsize,
+                  Type.CORE_IDX.convertVal(indexSize), false, false);
             } else {
               throw new Exception("unexpected indexSize ri=" + ri);
             }
@@ -1284,10 +1285,10 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             try {
               simSetShardValue(collection, s.getName(), "SEARCHER.searcher.deletedDocs", numDocs, false, false);
               simSetShardValue(collection, s.getName(), "SEARCHER.searcher.numDocs", 0, false, false);
-              simSetShardValue(collection, s.getName(), Suggestion.ConditionType.CORE_IDX.metricsAttribute,
+              simSetShardValue(collection, s.getName(), Type.CORE_IDX.metricsAttribute,
                   SimCloudManager.DEFAULT_IDX_SIZE_BYTES, false, false);
-              simSetShardValue(collection, s.getName(), Suggestion.coreidxsize,
-                  Suggestion.ConditionType.CORE_IDX.convertVal(SimCloudManager.DEFAULT_IDX_SIZE_BYTES), false, false);
+              simSetShardValue(collection, s.getName(), Variable.coreidxsize,
+                  Type.CORE_IDX.convertVal(SimCloudManager.DEFAULT_IDX_SIZE_BYTES), false, false);
             } catch (Exception e) {
               throw new IOException(e);
             }
@@ -1314,13 +1315,13 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             simSetShardValue(collection, s.getName(), "SEARCHER.searcher.maxDoc", 1, true, false);
 
             ReplicaInfo ri = getReplicaInfo(leader);
-            Number indexSize = (Number)ri.getVariable(Suggestion.ConditionType.CORE_IDX.metricsAttribute);
+            Number indexSize = (Number)ri.getVariable(Type.CORE_IDX.metricsAttribute);
             // for each new document increase the size by DEFAULT_DOC_SIZE_BYTES
             indexSize = indexSize.longValue() + DEFAULT_DOC_SIZE_BYTES;
-            simSetShardValue(collection, s.getName(), Suggestion.ConditionType.CORE_IDX.metricsAttribute,
+            simSetShardValue(collection, s.getName(), Type.CORE_IDX.metricsAttribute,
                 indexSize.longValue(), false, false);
-            simSetShardValue(collection, s.getName(), Suggestion.coreidxsize,
-                Suggestion.ConditionType.CORE_IDX.convertVal(indexSize), false, false);
+            simSetShardValue(collection, s.getName(), Variable.coreidxsize,
+                Type.CORE_IDX.convertVal(indexSize), false, false);
           } catch (Exception e) {
             throw new IOException(e);
           }

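The hunks above migrate the simulator from the monolithic Suggestion.ConditionType enum to Variable.Type, whose constants delegate per-variable behavior to dedicated classes (CoresVariable, FreeDiskVariable, and so on, added below). The following is a minimal, self-contained sketch of that delegation pattern; every name here is invented for illustration and is not the actual Solr API.

  // Sketch only: an enum whose constants delegate to strategy objects,
  // so per-variable logic (e.g. unit conversion) lives next to the variable.
  interface VarBehavior {
    default Object convertVal(Object val) { return val; }
  }

  class FreeDiskBehavior implements VarBehavior {
    @Override public Object convertVal(Object val) {
      // bytes to gigabytes, as FreeDiskVariable.convertVal does further down
      return ((Number) val).doubleValue() / 1024.0d / 1024.0d / 1024.0d;
    }
  }

  enum VarType {
    FREEDISK(new FreeDiskBehavior()),
    CORES(new VarBehavior() {});

    private final VarBehavior impl;
    VarType(VarBehavior impl) { this.impl = impl; }
    Object convertVal(Object val) { return impl.convertVal(val); }
  }

  public class TypeDemo {
    public static void main(String[] args) {
      System.out.println(VarType.FREEDISK.convertVal(3L * 1024 * 1024 * 1024)); // 3.0
    }
  }
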
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestPolicyCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestPolicyCloud.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestPolicyCloud.java
index fad637d..1adbabb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestPolicyCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestPolicyCloud.java
@@ -33,7 +33,7 @@ import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.Row;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.CloudTestUtils;
 import org.apache.solr.cloud.autoscaling.AutoScalingHandlerTest;
@@ -95,7 +95,7 @@ public class TestPolicyCloud extends SimSolrCloudTestCase {
     for (Row row : session.getSortedNodes()) {
       row.collectionVsShardVsReplicas.forEach((c, shardVsReplicas) -> shardVsReplicas.forEach((s, replicaInfos) -> {
         for (ReplicaInfo replicaInfo : replicaInfos) {
-          if (replicaInfo.getVariables().containsKey(Suggestion.ConditionType.CORE_IDX.tagName)) count.incrementAndGet();
+          if (replicaInfo.getVariables().containsKey(Type.CORE_IDX.tagName)) count.incrementAndGet();
         }
       }));
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Cell.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Cell.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Cell.java
index 0fa2db2..e222541 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Cell.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Cell.java
@@ -20,6 +20,7 @@ package org.apache.solr.client.solrj.cloud.autoscaling;
 import java.io.IOException;
 import java.util.HashMap;
 
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.util.Utils;
 
@@ -28,12 +29,12 @@ import org.apache.solr.common.util.Utils;
  */
 public class Cell implements MapWriter {
   final int index;
-  final Suggestion.ConditionType type;
+  final Type type;
   final String name;
   Object val, approxVal;
   Row row;
 
-  public Cell(int index, String name, Object val, Object approxVal, Suggestion.ConditionType type, Row row) {
+  public Cell(int index, String name, Object val, Object approxVal, Type type, Row row) {
     this.index = index;
     this.name = name;
     this.val = val;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
index 8fd815e..5fd9f8b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
@@ -31,7 +31,7 @@ import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.util.StrUtils;
@@ -254,7 +254,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
     Object expectedVal = null;
     ComputedType computedType = null;
     Object val = m.get(s);
-    ConditionType varType = Suggestion.getTagType(s);
+    Type varType = VariableBase.getTagType(s);
     if (varType.meta.isHidden()) {
       throw new IllegalArgumentException(formatString("''{0}'' is not allowed in a policy rule :  ''{1}''  ", varType.tagName, toJSONString(m)));
     }
@@ -309,7 +309,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
     }
   }
 
-  private List readListVal(Map m, List val, ConditionType varType, String conditionName) {
+  private List readListVal(Map m, List val, Type varType, String conditionName) {
     List list = val;
     list = (List) list.stream()
         .map(it -> varType.validate(conditionName, it, true))
@@ -354,7 +354,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
 
   public List<Violation> test(Policy.Session session) {
     ComputedValueEvaluator computedValueEvaluator = new ComputedValueEvaluator(session);
-    Suggestion.ViolationCtx ctx = new Suggestion.ViolationCtx(this, session.matrix, computedValueEvaluator);
+    Violation.Ctx ctx = new Violation.Ctx(this, session.matrix, computedValueEvaluator);
     if (isPerCollectiontag()) {
       Map<String, Map<String, Map<String, ReplicaCount>>> replicaCounts = computeReplicaCounts(session.matrix, computedValueEvaluator);
       for (Map.Entry<String, Map<String, Map<String, ReplicaCount>>> e : replicaCounts.entrySet()) {
@@ -448,7 +448,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
     return false;
   }
 
-  enum ComputedType {
+  public enum ComputedType {
     NULL(),
     EQUAL() {
       @Override
@@ -532,7 +532,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
   public static class Condition implements MapWriter {
     final String name;
     final Object val;
-    final ConditionType varType;
+    final Type varType;
     final ComputedType computedType;
     final Operand op;
     private Clause clause;
@@ -541,7 +541,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
       this.name = name;
       this.val = val;
       this.op = op;
-      varType = Suggestion.getTagType(name);
+      varType = VariableBase.getTagType(name);
       this.computedType = computedType;
       this.clause = parent;
     }
@@ -668,7 +668,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
    */
   public static Object  validate(String name, Object val, boolean isRuleVal) {
     if (val == null) return null;
-    ConditionType info = Suggestion.getTagType(name);
+    Type info = VariableBase.getTagType(name);
     if (info == null) throw new RuntimeException("Unknown type :" + name);
     return info.validate(name, val, isRuleVal);
   }
@@ -723,36 +723,4 @@ public class Clause implements MapWriter, Comparable<Clause> {
 
   public static final String METRICS_PREFIX = "metrics:";
 
-  static class RangeVal implements MapWriter {
-    final Number min, max, actual;
-
-    RangeVal(Number min, Number max, Number actual) {
-      this.min = min;
-      this.max = max;
-      this.actual = actual;
-    }
-
-    public boolean match(Number testVal) {
-      return Double.compare(testVal.doubleValue(), min.doubleValue()) >= 0 &&
-          Double.compare(testVal.doubleValue(), max.doubleValue()) <= 0;
-    }
-
-    public Double delta(double v) {
-//      if (actual != null) return v - actual.doubleValue();
-      if (v >= max.doubleValue()) return v - max.doubleValue();
-      if (v <= min.doubleValue()) return v - min.doubleValue();
-      return 0d;
-    }
-
-    @Override
-    public String toString() {
-      return jsonStr();
-    }
-
-    @Override
-    public void writeMap(EntryWriter ew) throws IOException {
-      ew.put("min", min).put("max", max).putIfNotNull("actual", actual);
-    }
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
new file mode 100644
index 0000000..577717f
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.util.function.Consumer;
+
+import org.apache.solr.common.cloud.rule.ImplicitSnitch;
+
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
+
+public class CoresVariable extends VariableBase {
+  public CoresVariable(Type type) {
+    super(type);
+  }
+
+  @Override
+  public Object validate(String name, Object val, boolean isRuleVal) {
+    return VariableBase.getOperandAdjustedValue(super.validate(name, val, isRuleVal), val);
+  }
+
+  @Override
+  public void addViolatingReplicas(Violation.Ctx ctx) {
+    for (Row r : ctx.allRows) {
+      if (!ctx.clause.tag.isPass(r)) {
+        r.forEachReplica(replicaInfo -> ctx.currentViolation
+            .addReplica(new Violation.ReplicaInfoAndErr(replicaInfo)
+                .withDelta(ctx.clause.tag.delta(r.getVal(ImplicitSnitch.CORES)))));
+      }
+    }
+
+  }
+
+  @Override
+  public void getSuggestions(Suggestion.Ctx ctx) {
+    if (ctx.violation == null || ctx.violation.replicaCountDelta == 0) return;
+    if (ctx.violation.replicaCountDelta > 0) { // there are more replicas than necessary
+      for (int i = 0; i < Math.abs(ctx.violation.replicaCountDelta); i++) {
+        Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
+            .hint(Suggester.Hint.SRC_NODE, ctx.violation.node);
+        ctx.addSuggestion(suggester);
+      }
+    }
+  }
+
+  @Override
+  public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> ops, boolean strictMode) {
+    cell.val = cell.val == null ? 0 : ((Number) cell.val).longValue() + 1;
+  }
+
+  @Override
+  public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
+    cell.val = cell.val == null ? 0 : ((Number) cell.val).longValue() - 1;
+  }
+}

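Note how projectAddReplica and projectRemoveReplica above treat a null cell value: it short-circuits to 0 rather than being adjusted. A tiny standalone sketch of that null-safe core-count projection (hypothetical names, not Solr code):

  public class CoreCountProjection {
    // mirrors CoresVariable above: null short-circuits to 0, otherwise adjust by delta
    static Object project(Object cellVal, int delta) {
      return cellVal == null ? 0 : ((Number) cellVal).longValue() + delta;
    }

    public static void main(String[] args) {
      System.out.println(project(null, 1)); // 0 (null becomes 0, not 1)
      System.out.println(project(5L, 1));   // 6
      System.out.println(project(5L, -1));  // 4
    }
  }
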
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
new file mode 100644
index 0000000..b598207
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+
+import org.apache.solr.common.cloud.rule.ImplicitSnitch;
+import org.apache.solr.common.util.Pair;
+
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.CORE_IDX;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.TOTALDISK;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
+
+public class FreeDiskVariable extends VariableBase {
+
+  public FreeDiskVariable(Type type) {
+    super(type);
+  }
+
+  @Override
+  public Object convertVal(Object val) {
+    Number value = (Number) super.validate(ImplicitSnitch.DISK, val, false);
+    if (value != null) {
+      value = value.doubleValue() / 1024.0d / 1024.0d / 1024.0d;
+    }
+    return value;
+  }
+
+  @Override
+  public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
+    if (condition.computedType == Clause.ComputedType.PERCENT) {
+      Row r = session.getNode(node);
+      if (r == null) return 0d;
+      return Clause.ComputedType.PERCENT.compute(r.getVal(TOTALDISK.tagName), condition);
+    }
+    throw new IllegalArgumentException("Unsupported type " + condition.computedType);
+  }
+
+
+
+  @Override
+  public int compareViolation(Violation v1, Violation v2) {
+    //TODO use tolerance compare
+    return Double.compare(
+        v1.getViolatingReplicas().stream().mapToDouble(v -> v.delta == null ? 0 : v.delta).max().orElse(0d),
+        v2.getViolatingReplicas().stream().mapToDouble(v3 -> v3.delta == null ? 0 : v3.delta).max().orElse(0d));
+  }
+
+  @Override
+  public void getSuggestions(Suggestion.Ctx ctx) {
+    if (ctx.violation == null) return;
+    if (ctx.violation.replicaCountDelta < 0 && !ctx.violation.getViolatingReplicas().isEmpty()) {
+
+      Comparator<Row> rowComparator = Comparator.comparing(r -> ((Double) r.getVal(ImplicitSnitch.DISK, 0d)));
+      List<Row> matchingNodes = ctx.session.matrix.stream().filter(
+          row -> ctx.violation.getViolatingReplicas()
+              .stream()
+              .anyMatch(p -> row.node.equals(p.replicaInfo.getNode())))
+          .sorted(rowComparator)
+          .collect(Collectors.toList());
+
+
+      for (Row node : matchingNodes) {
+        // let's try to start moving the smallest cores off of the node
+        ArrayList<ReplicaInfo> replicas = new ArrayList<>();
+        node.forEachReplica(replicas::add);
+        replicas.sort((r1, r2) -> {
+          Long s1 = Clause.parseLong(CORE_IDX.tagName, r1.getVariables().get(CORE_IDX.tagName));
+          Long s2 = Clause.parseLong(CORE_IDX.tagName, r2.getVariables().get(CORE_IDX.tagName));
+          if (s1 != null && s2 != null) return s1.compareTo(s2);
+          return 0;
+        });
+        double currentDelta = ctx.violation.getClause().tag.delta(node.getVal(ImplicitSnitch.DISK));
+        for (ReplicaInfo replica : replicas) {
+          if (currentDelta < 1) break;
+          if (replica.getVariables().get(CORE_IDX.tagName) == null) continue;
+          Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
+              .hint(Suggester.Hint.COLL_SHARD, new Pair<>(replica.getCollection(), replica.getShard()))
+              .hint(Suggester.Hint.SRC_NODE, node.node)
+              .forceOperation(true);
+          if (ctx.addSuggestion(suggester) == null) break;
+          currentDelta -= Clause.parseLong(CORE_IDX.tagName, replica.getVariable(CORE_IDX.tagName));
+        }
+      }
+    }
+  }
+
+  // When a replica is added, freedisk should be decremented by the replica's index size
+  @Override
+  public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> ops, boolean strictMode) {
+    //go through other replicas of this shard and copy the index size value into this
+    for (Row row : cell.getRow().session.matrix) {
+      row.forEachReplica(replicaInfo -> {
+        if (ri != replicaInfo &&
+            ri.getCollection().equals(replicaInfo.getCollection()) &&
+            ri.getShard().equals(replicaInfo.getShard()) &&
+            ri.getVariable(CORE_IDX.tagName) == null &&
+            replicaInfo.getVariable(CORE_IDX.tagName) != null) {
+          ri.getVariables().put(CORE_IDX.tagName, validate(CORE_IDX.tagName, replicaInfo.getVariable(CORE_IDX.tagName), false));
+        }
+      });
+    }
+    Double idxSize = (Double) validate(CORE_IDX.tagName, ri.getVariable(CORE_IDX.tagName), false);
+    if (idxSize == null) return;
+    Double currFreeDisk = cell.val == null ? 0.0d : (Double) cell.val;
+    cell.val = currFreeDisk - idxSize;
+  }
+
+  @Override
+  public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
+    Double idxSize = (Double) validate(CORE_IDX.tagName, ri.getVariable(CORE_IDX.tagName), false);
+    if (idxSize == null) return;
+    Double currFreeDisk = cell.val == null ? 0.0d : (Double) cell.val;
+    cell.val = currFreeDisk + idxSize;
+  }
+}

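getSuggestions above is a greedy loop: nodes matching the violation are sorted by free disk, then each node's replicas are sorted by index size and MOVEREPLICA is suggested for the smallest ones until the remaining deficit (currentDelta) drops below 1. A self-contained sketch of that selection, with an invented Replica type standing in for ReplicaInfo:

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.Comparator;
  import java.util.List;

  public class GreedyDiskRelief {
    static class Replica {
      final String name; final long idxSizeGb;
      Replica(String name, long idxSizeGb) { this.name = name; this.idxSizeGb = idxSizeGb; }
      @Override public String toString() { return name + "(" + idxSizeGb + ")"; }
    }

    // pick replicas to move, smallest index first, until the disk deficit is covered
    static List<Replica> pickMoves(List<Replica> replicas, double deficitGb) {
      List<Replica> sorted = new ArrayList<>(replicas);
      sorted.sort(Comparator.comparingLong((Replica r) -> r.idxSizeGb));
      List<Replica> moves = new ArrayList<>();
      double remaining = deficitGb;
      for (Replica r : sorted) {
        if (remaining < 1) break;   // same stop condition as currentDelta < 1 above
        moves.add(r);
        remaining -= r.idxSizeGb;   // moving a replica frees its index size
      }
      return moves;
    }

    public static void main(String[] args) {
      List<Replica> rs = Arrays.asList(new Replica("a", 5), new Replica("b", 2), new Replica("c", 9));
      System.out.println(pickMoves(rs, 6)); // [b(2), a(5)]
    }
  }
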
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NodeVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NodeVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NodeVariable.java
new file mode 100644
index 0000000..3292800
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NodeVariable.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import org.apache.solr.common.util.Pair;
+
+import static org.apache.solr.client.solrj.cloud.autoscaling.Policy.ANY;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
+
+public class NodeVariable extends VariableBase {
+  public NodeVariable(Type type) {
+    super(type);
+  }
+
+  @Override
+  public void getSuggestions(Suggestion.Ctx ctx) {
+    if (ctx.violation == null || ctx.violation.replicaCountDelta == 0) return;
+    if (ctx.violation.replicaCountDelta > 0) { // there are more replicas than necessary
+      for (int i = 0; i < Math.abs(ctx.violation.replicaCountDelta); i++) {
+        Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
+            .hint(Suggester.Hint.SRC_NODE, ctx.violation.node)
+            .hint(ctx.violation.shard.equals(ANY) ? Suggester.Hint.COLL : Suggester.Hint.COLL_SHARD,
+                ctx.violation.shard.equals(ANY) ? ctx.violation.coll : new Pair<>(ctx.violation.coll, ctx.violation.shard));
+        ctx.addSuggestion(suggester);
+      }
+    }
+
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
index b458646..3e2368c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
@@ -40,12 +40,12 @@ public enum Operand {
   RANGE_EQUAL("", 0) {
     @Override
     public TestStatus match(Object ruleVal, Object testVal) {
-      return ((Clause.RangeVal) ruleVal).match((Number) testVal) ? PASS : FAIL;
+      return ((RangeVal) ruleVal).match((Number) testVal) ? PASS : FAIL;
     }
 
     @Override
     public Double delta(Object expected, Object actual) {
-      return ((Clause.RangeVal) expected).delta(((Number) actual).doubleValue());
+      return ((RangeVal) expected).delta(((Number) actual).doubleValue());
     }
 
     @Override
@@ -56,7 +56,7 @@ public enum Operand {
         if (hyphenIdx > 0) {
           String minS = strVal.substring(0, hyphenIdx).trim();
           String maxS = strVal.substring(hyphenIdx + 1, strVal.length()).trim();
-          return new Clause.RangeVal(
+          return new RangeVal(
               (Number) condition.varType.validate(condition.name, minS, true),
               (Number) condition.varType.validate(condition.name, maxS, true),
               null
@@ -68,7 +68,7 @@ public enum Operand {
 
 
       Number num = (Number) condition.varType.validate(condition.name, condition.val, true);
-      return new Clause.RangeVal(Math.floor(num.doubleValue()), Math.ceil(num.doubleValue()), num);
+      return new RangeVal(Math.floor(num.doubleValue()), Math.ceil(num.doubleValue()), num);
     }
   },
   EQUAL("", 0) {
@@ -87,7 +87,7 @@ public enum Operand {
   RANGE_NOT_EQUAL("", 2) {
     @Override
     public TestStatus match(Object ruleVal, Object testVal) {
-      return ((Clause.RangeVal) ruleVal).match((Number) testVal) ? FAIL : PASS;
+      return ((RangeVal) ruleVal).match((Number) testVal) ? FAIL : PASS;
     }
 
     @Override

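RANGE_EQUAL's readRuleValue above accepts either a "min-max" string, split on the first interior hyphen, or a bare number, which becomes the inclusive range [floor(n), ceil(n)]. A standalone sketch of that parsing, using plain doubles in place of the condition.varType.validate calls:

  import java.util.Arrays;

  public class RangeParse {
    // returns {min, max}, parsed the way RANGE_EQUAL does above
    static double[] parse(String s) {
      s = s.trim();
      int hyphenIdx = s.indexOf('-');
      if (hyphenIdx > 0) { // > 0, so a leading minus sign is not treated as a range
        double min = Double.parseDouble(s.substring(0, hyphenIdx).trim());
        double max = Double.parseDouble(s.substring(hyphenIdx + 1).trim());
        return new double[]{min, max};
      }
      double n = Double.parseDouble(s);
      return new double[]{Math.floor(n), Math.ceil(n)};
    }

    public static void main(String[] args) {
      System.out.println(Arrays.toString(parse("3-5"))); // [3.0, 5.0]
      System.out.println(Arrays.toString(parse("2.5"))); // [2.0, 3.0]
    }
  }
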
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
index 879bb74..09aa244 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
@@ -38,7 +38,7 @@ import java.util.stream.Collectors;
 
 import org.apache.solr.client.solrj.cloud.NodeStateProvider;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.common.IteratorWriter;
 import org.apache.solr.common.MapWriter;
@@ -56,8 +56,8 @@ import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.stream.Collectors.collectingAndThen;
 import static java.util.stream.Collectors.toList;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.NODE;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.WITH_COLLECTION;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.NODE;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.WITH_COLLECTION;
 
 /*The class that reads, parses and applies policies specified in
  * autoscaling.json
@@ -91,7 +91,7 @@ public class Policy implements MapWriter {
   final Map<String, List<Clause>> policies;
   final List<Clause> clusterPolicy;
   final List<Preference> clusterPreferences;
-  final List<Pair<String, ConditionType>> params;
+  final List<Pair<String, Type>> params;
   final List<String> perReplicaAttributes;
 
   public Policy() {
@@ -124,7 +124,7 @@ public class Policy implements MapWriter {
         .collect(collectingAndThen(toList(), Collections::unmodifiableList));
 
     for (String newParam : new ArrayList<>(newParams)) {
-      ConditionType t = Suggestion.getTagType(newParam);
+      Type t = VariableBase.getTagType(newParam);
       if(t != null && !t.associatedPerNodeValues.isEmpty()){
         for (String s : t.associatedPerNodeValues) {
           if(!newParams.contains(s)) newParams.add(s);
@@ -134,8 +134,8 @@ public class Policy implements MapWriter {
 
     this.policies = Collections.unmodifiableMap(
         policiesFromMap((Map<String, List<Map<String, Object>>>) jsonMap.getOrDefault(POLICIES, emptyMap()), newParams));
-    List<Pair<String, Suggestion.ConditionType>> params = newParams.stream()
-        .map(s -> new Pair<>(s, Suggestion.getTagType(s)))
+    List<Pair<String, Type>> params = newParams.stream()
+        .map(s -> new Pair<>(s, VariableBase.getTagType(s)))
         .collect(toList());
     //let this be there always, there is no extra cost
     params.add(new Pair<>(WITH_COLLECTION.tagName, WITH_COLLECTION));
@@ -156,7 +156,7 @@ public class Policy implements MapWriter {
     this.clusterPreferences = clusterPreferences != null ? Collections.unmodifiableList(clusterPreferences) : DEFAULT_PREFERENCES;
     this.params = Collections.unmodifiableList(
         buildParams(this.clusterPreferences, this.clusterPolicy, this.policies).stream()
-            .map(s -> new Pair<>(s, Suggestion.getTagType(s)))
+            .map(s -> new Pair<>(s, VariableBase.getTagType(s)))
             .collect(toList())
     );
     perReplicaAttributes = readPerReplicaAttrs();
@@ -615,15 +615,4 @@ public class Policy implements MapWriter {
       throw new RuntimeException("NO such node found " + node);
     }
   }
-  static final Map<String, Suggestion.ConditionType> validatetypes = new HashMap<>();
-  static {
-    for (Suggestion.ConditionType t : Suggestion.ConditionType.values())
-      validatetypes.put(t.tagName, t);
-  }
-  public static ConditionType getTagType(String name) {
-    ConditionType info = validatetypes.get(name);
-    if (info == null && name.startsWith(ImplicitSnitch.SYSPROP)) info = ConditionType.STRING;
-    if (info == null && name.startsWith(Clause.METRICS_PREFIX)) info = ConditionType.LAZY;
-    return info;
-  }
 }

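The getTagType lookup deleted from Policy above survives as VariableBase.getTagType with the same behavior: exact tag names resolve through a lookup table, and two prefix fallbacks handle open-ended names (system properties resolve to STRING, metrics: names to LAZY). A sketch of that lookup-with-fallback shape; the map contents and prefix literals here are illustrative, not the full Solr set:

  import java.util.HashMap;
  import java.util.Map;

  public class TagLookup {
    enum Kind { STRING, LAZY, FREEDISK }

    static final Map<String, Kind> KNOWN = new HashMap<>();
    static { KNOWN.put("freedisk", Kind.FREEDISK); }

    // exact match first, then prefix fallbacks, as in VariableBase.getTagType
    static Kind getTagType(String name) {
      Kind k = KNOWN.get(name);
      if (k == null && name.startsWith("sysprop.")) k = Kind.STRING;
      if (k == null && name.startsWith("metrics:")) k = Kind.LAZY;
      return k; // may still be null for a truly unknown tag
    }

    public static void main(String[] args) {
      System.out.println(getTagType("freedisk"));        // FREEDISK
      System.out.println(getTagType("sysprop.rack"));    // STRING
      System.out.println(getTagType("metrics:jvm/mem")); // LAZY
    }
  }
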
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java
index ed2a395..f662eff 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonList;
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.FREEDISK;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.FREEDISK;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
 import static org.apache.solr.common.params.CoreAdminParams.NODE;
 import static org.apache.solr.common.util.Utils.time;
@@ -127,7 +127,7 @@ public class PolicyHelper {
               if (replicaInfo != null) {
                 Object idxSz = replicaInfo.getVariables().get(FREEDISK.perReplicaValue);
                 if (idxSz != null) {
-                  diskSpaceReqd.put(shardName, 1.5 * (Double) Suggestion.ConditionType.FREEDISK.validate(null, idxSz, false));
+                  diskSpaceReqd.put(shardName, 1.5 * (Double) Variable.Type.FREEDISK.validate(null, idxSz, false));
                 }
               }
             }
@@ -213,13 +213,14 @@ public class PolicyHelper {
 
   public static List<Suggester.SuggestionInfo> getSuggestions(AutoScalingConfig autoScalingConf, SolrCloudManager cloudManager) {
     Policy policy = autoScalingConf.getPolicy();
-    Suggestion.SuggestionCtx suggestionCtx = new Suggestion.SuggestionCtx();
+    Suggestion.Ctx suggestionCtx = new Suggestion.Ctx();
     suggestionCtx.session = policy.createSession(cloudManager);
     List<Violation> violations = suggestionCtx.session.getViolations();
     for (Violation violation : violations) {
-      Suggestion.ConditionType tagType = Suggestion.getTagType(violation.getClause().isPerCollectiontag() ?
+      String name = violation.getClause().isPerCollectiontag() ?
           violation.getClause().tag.name :
-          violation.getClause().globalTag.name);
+          violation.getClause().globalTag.name;
+      Variable.Type tagType = VariableBase.getTagType(name);
       tagType.getSuggestions(suggestionCtx.setViolation(violation));
       suggestionCtx.violation = null;
     }

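The diskSpaceReqd hunk above keeps the existing sizing rule: a new replica requires 1.5 times the leader's validated index size, presumably to leave headroom beyond a straight copy. A one-line worked example of that arithmetic:

  public class DiskReqd {
    public static void main(String[] args) {
      double leaderIdxGb = 20.0;             // hypothetical leader index size
      System.out.println(1.5 * leaderIdxGb); // 30.0 GB required, per the 1.5 factor above
    }
  }
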
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/RangeVal.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/RangeVal.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/RangeVal.java
new file mode 100644
index 0000000..16cfe6a
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/RangeVal.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.io.IOException;
+
+import org.apache.solr.common.MapWriter;
+
+class RangeVal implements MapWriter {
+  final Number min, max, actual;
+
+  RangeVal(Number min, Number max, Number actual) {
+    this.min = min;
+    this.max = max;
+    this.actual = actual;
+  }
+
+  public boolean match(Number testVal) {
+    return Double.compare(testVal.doubleValue(), min.doubleValue()) >= 0 &&
+        Double.compare(testVal.doubleValue(), max.doubleValue()) <= 0;
+  }
+
+  public Double delta(double v) {
+//      if (actual != null) return v - actual.doubleValue();
+    if (v >= max.doubleValue()) return v - max.doubleValue();
+    if (v <= min.doubleValue()) return v - min.doubleValue();
+    return 0d;
+  }
+
+  @Override
+  public String toString() {
+    return jsonStr();
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put("min", min).put("max", max).putIfNotNull("actual", actual);
+  }
+}

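RangeVal is moved verbatim out of Clause into its own file. Its match method tests inclusive containment in [min, max], and delta reports how far a value lies outside the range (0 when inside). A standalone restatement of those two rules with sample values:

  public class RangeCheck {
    // same containment and delta logic as RangeVal above, on plain doubles
    static boolean match(double min, double max, double v) {
      return v >= min && v <= max;
    }

    static double delta(double min, double max, double v) {
      if (v >= max) return v - max; // positive overshoot past max
      if (v <= min) return v - min; // negative undershoot below min
      return 0d;                    // inside the range
    }

    public static void main(String[] args) {
      System.out.println(match(1, 3, 2)); // true
      System.out.println(delta(1, 3, 5)); // 2.0
      System.out.println(delta(1, 3, 0)); // -1.0
      System.out.println(delta(1, 3, 2)); // 0.0
    }
  }
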
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
new file mode 100644
index 0000000..2f66609
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.solr.common.util.StrUtils;
+
+class ReplicaVariable extends VariableBase {
+
+  public ReplicaVariable(Type type) {
+    super(type);
+  }
+
+  static int getRelevantReplicasCount(Policy.Session session, Clause.Condition cv, String collection, String shard) {
+    AtomicInteger totalReplicasOfInterest = new AtomicInteger(0);
+    Clause clause = cv.getClause();
+    for (Row row : session.matrix) {
+      row.forEachReplica(replicaInfo -> {
+        if (clause.isMatch(replicaInfo, collection, shard))
+          totalReplicasOfInterest.incrementAndGet();
+      });
+    }
+    return totalReplicasOfInterest.get();
+  }
+
+  @Override
+  public Object validate(String name, Object val, boolean isRuleVal) {
+    return getOperandAdjustedValue(super.validate(name, val, isRuleVal), val);
+  }
+
+
+
+  @Override
+  public Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
+    if (computedType == Clause.ComputedType.ALL) return expected;
+    if (strVal instanceof String) {
+      String s = ((String) strVal).trim();
+      int hyphenIdx = s.indexOf('-');
+      if (hyphenIdx > 0) {
+        if (hyphenIdx == s.length() - 1) {
+          throw new IllegalArgumentException("bad range input :" + expected);
+        }
+        if (expected == Operand.EQUAL) return Operand.RANGE_EQUAL;
+        if (expected == Operand.NOT_EQUAL) return Operand.RANGE_NOT_EQUAL;
+      }
+
+    }
+
+    if (expected == Operand.EQUAL && (computedType != null || !isIntegerEquivalent(strVal))) {
+      return Operand.RANGE_EQUAL;
+    }
+    if (expected == Operand.NOT_EQUAL && (computedType != null || !isIntegerEquivalent(strVal)))
+      return Operand.RANGE_NOT_EQUAL;
+
+    return expected;
+  }
+
+  @Override
+  public String postValidate(Clause.Condition condition) {
+    if (condition.computedType == Clause.ComputedType.EQUAL) {
+      if (condition.getClause().tag != null &&
+//              condition.getClause().tag.varType == NODE &&
+          (condition.getClause().tag.op == Operand.WILDCARD || condition.getClause().tag.op == Operand.IN)) {
+        return null;
+      } else {
+        return "'replica': '#EQUAL` must be used with 'node':'#ANY'";
+      }
+    }
+    if (condition.computedType == Clause.ComputedType.ALL) {
+      if (condition.getClause().tag != null && (condition.getClause().getTag().op == Operand.IN ||
+          condition.getClause().getTag().op == Operand.WILDCARD)) {
+        return StrUtils.formatString("array value or wild card cannot be used for tag {0} with replica : '#ALL'",
+            condition.getClause().tag.getName());
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Object computeValue(Policy.Session session, Clause.Condition cv, String collection, String shard, String node) {
+    if (cv.computedType == Clause.ComputedType.ALL)
+      return Double.valueOf(getRelevantReplicasCount(session, cv, collection, shard));
+    if (cv.computedType == Clause.ComputedType.EQUAL) {
+      int relevantReplicasCount = getRelevantReplicasCount(session, cv, collection, shard);
+      double bucketsCount = getNumBuckets(session, cv.getClause());
+      if (relevantReplicasCount == 0 || bucketsCount == 0) return 0;
+      return (double) relevantReplicasCount / bucketsCount;
+    } else if (cv.computedType == Clause.ComputedType.PERCENT) {
+      return Clause.ComputedType.PERCENT.compute(getRelevantReplicasCount(session, cv, collection, shard), cv);
+    } else {
+      throw new IllegalArgumentException("Unsupported type " + cv.computedType);
+
+    }
+  }
+
+  private int getNumBuckets(Policy.Session session, Clause clause) {
+    if (clause.getTag().getOperand() == Operand.IN) {
+      return ((Collection) clause.getTag().val).size();
+    } else if (clause.getTag().getOperand() == Operand.WILDCARD) {
+      if (clause.getTag().varType == Type.NODE) return session.matrix.size();
+      Set uniqueVals = new HashSet();
+      for (Row matrix : session.matrix) {
+        Object val = matrix.getVal(clause.getTag().name);
+        if (val != null) uniqueVals.add(val);
+      }
+      return uniqueVals.size();
+    } else {
+      throw new IllegalArgumentException("Invalid operand for the tag in  " + clause);
+    }
+
+  }
+}
\ No newline at end of file

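For replica:'#EQUAL', computeValue above divides the count of relevant replicas by the number of buckets: the node count for a wildcard node tag, otherwise the number of distinct tag values. A worked sketch of that division with invented inputs:

  public class EqualReplicas {
    // replica:'#EQUAL' resolves to replicas per bucket, as computeValue does above
    static double perBucket(int relevantReplicas, int buckets) {
      if (relevantReplicas == 0 || buckets == 0) return 0;
      return (double) relevantReplicas / buckets;
    }

    public static void main(String[] args) {
      System.out.println(perBucket(10, 4)); // 2.5 replicas expected per node
      System.out.println(perBucket(0, 4));  // 0.0
    }
  }
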
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java
index 8a14abd..88e9921 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java
@@ -54,7 +54,7 @@ public class Row implements MapWriter {
   boolean isLive = true;
   Policy.Session session;
 
-  public Row(String node, List<Pair<String, Suggestion.ConditionType>> params, List<String> perReplicaAttributes, Policy.Session session) {
+  public Row(String node, List<Pair<String, Variable.Type>> params, List<String> perReplicaAttributes, Policy.Session session) {
     this.session = session;
     collectionVsShardVsReplicas = session.nodeStateProvider.getReplicaInfo(node, perReplicaAttributes);
     if (collectionVsShardVsReplicas == null) collectionVsShardVsReplicas = new HashMap<>();
@@ -64,7 +64,7 @@ public class Row implements MapWriter {
     List<String> paramNames = params.stream().map(Pair::first).collect(Collectors.toList());
     Map<String, Object> vals = isLive ? session.nodeStateProvider.getNodeValues(node, paramNames) : Collections.emptyMap();
     for (int i = 0; i < params.size(); i++) {
-      Pair<String, Suggestion.ConditionType> pair = params.get(i);
+      Pair<String, Variable.Type> pair = params.get(i);
       cells[i] = new Cell(i, pair.first(), Clause.validate(pair.first(), vals.get(pair.first()), false), null, pair.second(), this);
       if (NODE.equals(pair.first())) cells[i].val = node;
       if (cells[i].val == null) anyValueMissing = true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
index 67721ba..9b9b60e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
@@ -46,7 +46,7 @@ import org.apache.solr.common.util.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.FREEDISK;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.FREEDISK;
 import static org.apache.solr.common.params.CollectionAdminParams.WITH_COLLECTION;
 
 /* A suggester is capable of suggesting a collection operation

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java
index af20fac..3b18e02 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java
@@ -17,102 +17,18 @@
 
 package org.apache.solr.client.solrj.cloud.autoscaling;
 
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.function.Consumer;
-import java.util.function.Function;
-import java.util.stream.Collectors;
 
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.V2RequestSupport;
-import org.apache.solr.client.solrj.cloud.autoscaling.Clause.ComputedType;
 import org.apache.solr.client.solrj.cloud.autoscaling.Violation.ReplicaInfoAndErr;
-import org.apache.solr.common.cloud.rule.ImplicitSnitch;
 import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.StrUtils;
 
-import static java.util.Collections.emptySet;
-import static java.util.Collections.unmodifiableSet;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Clause.parseString;
-import static org.apache.solr.client.solrj.cloud.autoscaling.Policy.ANY;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
 
 public class Suggestion {
-  public static final String coreidxsize = "INDEX.sizeInGB";
-
-
-  private static final String NULL = "";
-
-  @Target(ElementType.FIELD)
-  @Retention(RetentionPolicy.RUNTIME)
-  @interface Meta {
-    String name();
-
-    Class type();
-
-    String[] associatedPerNodeValue() default NULL;
-
-    String associatedPerReplicaValue() default NULL;
-
-    String[] enumVals() default NULL;
-
-    String[] wildCards() default NULL;
-
-    boolean isNodeSpecificVal() default false;
-
-    boolean isHidden() default false;
-
-    boolean isAdditive() default true;
-
-    double min() default -1d;
-
-    double max() default -1d;
-
-    boolean supportArrayVals() default false;
-
-    String metricsKey() default NULL;
-
-    Class implementation() default void.class;
-
-    ComputedType[] computedValues() default ComputedType.NULL;
-  }
-
-  public static ConditionType getTagType(String name) {
-    return Policy.getTagType(name);
-  }
-
-  private static Object getOperandAdjustedValue(Object val, Object original) {
-    if (original instanceof Clause.Condition) {
-      Clause.Condition condition = (Clause.Condition) original;
-      if (condition.computedType == null && isIntegerEquivalent(val)) {
-        if (condition.op == Operand.LESS_THAN) {
-          //replica : '<3'
-          val = val instanceof Long ?
-              (Long) val - 1 :
-              (Double) val - 1;
-        } else if (condition.op == Operand.GREATER_THAN) {
-          //replica : '>4'
-          val = val instanceof Long ?
-              (Long) val + 1 :
-              (Double) val + 1;
-        }
-      }
-    }
-    return val;
-  }
-
-
-  static class SuggestionCtx {
+  static class Ctx {
     public Policy.Session session;
     public Violation violation;
     private List<Suggester.SuggestionInfo> suggestions = new ArrayList<>();
@@ -128,7 +44,7 @@ public class Suggestion {
     }
 
 
-    public SuggestionCtx setViolation(Violation violation) {
+    public Ctx setViolation(Violation violation) {
       this.violation = violation;
       return this;
     }
@@ -138,658 +54,8 @@ public class Suggestion {
     }
   }
 
-  static boolean isIntegerEquivalent(Object val) {
-    if (val instanceof Number) {
-      Number number = (Number) val;
-      return Math.ceil(number.doubleValue()) == Math.floor(number.doubleValue());
-    } else if (val instanceof String) {
-      try {
-        double dval = Double.parseDouble((String) val);
-        return Math.ceil(dval) == Math.floor(dval);
-      } catch (NumberFormatException e) {
-        return false;
-      }
-    } else {
-      return false;
-    }
-
-  }
-
-
-  /**
-   * Type details of each variable in policies
-   */
-  public enum ConditionType implements VarType {
-    @Meta(name = "withCollection", type = String.class, isNodeSpecificVal = true, implementation = WithCollectionVarType.class)
-    WITH_COLLECTION(),
-
-    @Meta(name = "collection",
-        type = String.class)
-    COLL(),
-    @Meta(
-        name = "shard",
-        type = String.class,
-        wildCards = {Policy.EACH, Policy.ANY})
-    SHARD(),
-
-    @Meta(name = "replica",
-        type = Double.class,
-        min = 0, max = -1,
-        computedValues = {ComputedType.EQUAL, ComputedType.PERCENT, ComputedType.ALL})
-    REPLICA() {
-      @Override
-      public Object validate(String name, Object val, boolean isRuleVal) {
-        return getOperandAdjustedValue(super.validate(name, val, isRuleVal), val);
-      }
-
-      @Override
-      public Operand getOperand(Operand expected, Object strVal, ComputedType computedType) {
-        if (computedType == ComputedType.ALL) return expected;
-        if (strVal instanceof String) {
-          String s = ((String) strVal).trim();
-          int hyphenIdx = s.indexOf('-');
-          if (hyphenIdx > 0) {
-            if (hyphenIdx == s.length() - 1) {
-              throw new IllegalArgumentException("bad range input :" + expected);
-            }
-            if (expected == Operand.EQUAL) return Operand.RANGE_EQUAL;
-            if (expected == Operand.NOT_EQUAL) return Operand.RANGE_NOT_EQUAL;
-          }
-
-        }
-
-        if (expected == Operand.EQUAL && (computedType != null || !isIntegerEquivalent(strVal))) {
-          return Operand.RANGE_EQUAL;
-        }
-        if (expected == Operand.NOT_EQUAL && (computedType != null || !isIntegerEquivalent(strVal)))
-          return Operand.RANGE_NOT_EQUAL;
-
-        return expected;
-      }
-
-      @Override
-      public String postValidate(Clause.Condition condition) {
-        if (condition.computedType == ComputedType.EQUAL) {
-          if (condition.getClause().tag != null &&
-//              condition.getClause().tag.varType == NODE &&
-              (condition.getClause().tag.op == Operand.WILDCARD || condition.getClause().tag.op == Operand.IN)) {
-            return null;
-          } else {
-            return "'replica': '#EQUAL` must be used with 'node':'#ANY'";
-          }
-        }
-        if (condition.computedType == ComputedType.ALL) {
-          if (condition.getClause().tag != null && (condition.getClause().getTag().op == Operand.IN ||
-              condition.getClause().getTag().op == Operand.WILDCARD)) {
-            return StrUtils.formatString("array value or wild card cannot be used for tag {0} with replica : '#ALL'",
-                condition.getClause().tag.getName());
-          }
-        }
-        return null;
-      }
-
-      @Override
-      public Object computeValue(Policy.Session session, Clause.Condition cv, String collection, String shard, String node) {
-        if (cv.computedType == ComputedType.ALL)
-          return Double.valueOf(getRelevantReplicasCount(session, cv, collection, shard));
-        if (cv.computedType == ComputedType.EQUAL) {
-          int relevantReplicasCount = getRelevantReplicasCount(session, cv, collection, shard);
-          double bucketsCount = getNumBuckets(session, cv.getClause());
-          if (relevantReplicasCount == 0 || bucketsCount == 0) return 0;
-          return (double) relevantReplicasCount / bucketsCount;
-        } else if (cv.computedType == ComputedType.PERCENT) {
-          return ComputedType.PERCENT.compute(getRelevantReplicasCount(session, cv, collection, shard), cv);
-        } else {
-          throw new IllegalArgumentException("Unsupported type " + cv.computedType);
-
-        }
-      }
-
-      private int getNumBuckets(Policy.Session session, Clause clause) {
-        if (clause.getTag().getOperand() == Operand.IN) {
-          return ((Collection) clause.getTag().val).size();
-        } else if (clause.getTag().getOperand() == Operand.WILDCARD) {
-          if (clause.getTag().varType == NODE) return session.matrix.size();
-          Set uniqueVals = new HashSet();
-          for (Row matrix : session.matrix) {
-            Object val = matrix.getVal(clause.getTag().name);
-            if (val != null) uniqueVals.add(val);
-          }
-          return uniqueVals.size();
-        } else {
-          throw new IllegalArgumentException("Invalid operand for the tag in  " + clause);
-        }
-
-      }
-    },
-    @Meta(name = ImplicitSnitch.PORT,
-        type = Long.class,
-        min = 1,
-        max = 65535,
-        supportArrayVals = true,
-        wildCards = Policy.EACH
-    )
-    PORT(),
-    @Meta(name = "ip_1",
-        type = Long.class,
-        min = 0,
-        max = 255,
-        supportArrayVals = true,
-        wildCards = Policy.EACH)
-    IP_1(),
-    @Meta(name = "ip_2",
-        type = Long.class,
-        min = 0,
-        max = 255,
-        supportArrayVals = true,
-        wildCards = Policy.EACH)
-    IP_2(),
-    @Meta(name = "ip_3",
-        type = Long.class,
-        min = 0,
-        max = 255,
-        supportArrayVals = true,
-        wildCards = Policy.EACH)
-    IP_3(),
-    @Meta(name = "ip_4",
-        type = Long.class,
-        min = 0,
-        max = 255,
-        supportArrayVals = true,
-        wildCards = Policy.EACH)
-    IP_4(),
-    @Meta(name = ImplicitSnitch.DISK,
-        type = Double.class,
-        min = 0,
-        isNodeSpecificVal = true,
-        associatedPerReplicaValue = coreidxsize,
-        associatedPerNodeValue = "totaldisk",
-        computedValues = ComputedType.PERCENT)
-    FREEDISK() {
-      @Override
-      public Object convertVal(Object val) {
-        Number value = (Number) super.validate(ImplicitSnitch.DISK, val, false);
-        if (value != null) {
-          value = value.doubleValue() / 1024.0d / 1024.0d / 1024.0d;
-        }
-        return value;
-      }
-
-      @Override
-      public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
-        if (condition.computedType == ComputedType.PERCENT) {
-          Row r = session.getNode(node);
-          if (r == null) return 0d;
-          return ComputedType.PERCENT.compute(r.getVal(TOTALDISK.tagName), condition);
-        }
-        throw new IllegalArgumentException("Unsupported type " + condition.computedType);
-      }
-
-
-
-      @Override
-      public int compareViolation(Violation v1, Violation v2) {
-        //TODO use tolerance compare
-        return Double.compare(
-            v1.getViolatingReplicas().stream().mapToDouble(v -> v.delta == null ? 0 : v.delta).max().orElse(0d),
-            v2.getViolatingReplicas().stream().mapToDouble(v3 -> v3.delta == null ? 0 : v3.delta).max().orElse(0d));
-      }
-
-      @Override
-      public void getSuggestions(SuggestionCtx ctx) {
-        if (ctx.violation == null) return;
-        if (ctx.violation.replicaCountDelta < 0 && !ctx.violation.getViolatingReplicas().isEmpty()) {
-
-          Comparator<Row> rowComparator = Comparator.comparing(r -> ((Double) r.getVal(ImplicitSnitch.DISK, 0d)));
-          List<Row> matchingNodes = ctx.session.matrix.stream().filter(
-              row -> ctx.violation.getViolatingReplicas()
-                  .stream()
-                  .anyMatch(p -> row.node.equals(p.replicaInfo.getNode())))
-              .sorted(rowComparator)
-              .collect(Collectors.toList());
-
-
-          for (Row node : matchingNodes) {
-            //lets try to start moving the smallest cores off of the node
-            ArrayList<ReplicaInfo> replicas = new ArrayList<>();
-            node.forEachReplica(replicas::add);
-            replicas.sort((r1, r2) -> {
-              Long s1 = Clause.parseLong(ConditionType.CORE_IDX.tagName, r1.getVariables().get(ConditionType.CORE_IDX.tagName));
-              Long s2 = Clause.parseLong(ConditionType.CORE_IDX.tagName, r2.getVariables().get(ConditionType.CORE_IDX.tagName));
-              if (s1 != null && s2 != null) return s1.compareTo(s2);
-              return 0;
-            });
-            double currentDelta = ctx.violation.getClause().tag.delta(node.getVal(ImplicitSnitch.DISK));
-            for (ReplicaInfo replica : replicas) {
-              if (currentDelta < 1) break;
-              if (replica.getVariables().get(ConditionType.CORE_IDX.tagName) == null) continue;
-              Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
-                  .hint(Suggester.Hint.COLL_SHARD, new Pair<>(replica.getCollection(), replica.getShard()))
-                  .hint(Suggester.Hint.SRC_NODE, node.node)
-                  .forceOperation(true);
-              if (ctx.addSuggestion(suggester) == null) break;
-              currentDelta -= Clause.parseLong(ConditionType.CORE_IDX.tagName, replica.getVariable(ConditionType.CORE_IDX.tagName));
-            }
-          }
-        }
-      }
-
-      //When a replica is added, freedisk should be incremented
-      @Override
-      public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> ops, boolean strictMode) {
-        //go through other replicas of this shard and copy the index size value into this
-        for (Row row : cell.getRow().session.matrix) {
-          row.forEachReplica(replicaInfo -> {
-            if (ri != replicaInfo &&
-                ri.getCollection().equals(replicaInfo.getCollection()) &&
-                ri.getShard().equals(replicaInfo.getShard()) &&
-                ri.getVariable(CORE_IDX.tagName) == null &&
-                replicaInfo.getVariable(CORE_IDX.tagName) != null) {
-              ri.getVariables().put(CORE_IDX.tagName, validate(CORE_IDX.tagName, replicaInfo.getVariable(CORE_IDX.tagName), false));
-            }
-          });
-        }
-        Double idxSize = (Double) validate(CORE_IDX.tagName, ri.getVariable(CORE_IDX.tagName), false);
-        if (idxSize == null) return;
-        Double currFreeDisk = cell.val == null ? 0.0d : (Double) cell.val;
-        cell.val = currFreeDisk - idxSize;
-      }
-
-      @Override
-      public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
-        Double idxSize = (Double) validate(CORE_IDX.tagName, ri.getVariable(CORE_IDX.tagName), false);
-        if (idxSize == null) return;
-        Double currFreeDisk = cell.val == null ? 0.0d : (Double) cell.val;
-        cell.val = currFreeDisk + idxSize;
-      }
-    },
-
-    @Meta(name = "totaldisk",
-        type = Double.class,
-        isHidden = true)
-    TOTALDISK() {
-      @Override
-      public Object convertVal(Object val) {
-        return FREEDISK.convertVal(val);
-      }
-    },
-
-    @Meta(name = coreidxsize,
-        type = Double.class,
-        isNodeSpecificVal = true,
-        isHidden = true,
-        min = 0,
-        metricsKey = "INDEX.sizeInBytes")
-    CORE_IDX() {
-      @Override
-      public Object convertVal(Object val) {
-        return FREEDISK.convertVal(val);
-      }
-    },
-    @Meta(name = ImplicitSnitch.NODEROLE,
-        type = String.class,
-        enumVals = "overseer")
-    NODE_ROLE(),
-
-    @Meta(name = ImplicitSnitch.CORES,
-        type = Long.class,
-        min = 0)
-    CORES() {
-      @Override
-      public Object validate(String name, Object val, boolean isRuleVal) {
-        return getOperandAdjustedValue(super.validate(name, val, isRuleVal), val);
-      }
-
-      @Override
-      public void addViolatingReplicas(ViolationCtx ctx) {
-        for (Row r : ctx.allRows) {
-          if (!ctx.clause.tag.isPass(r)) {
-            r.forEachReplica(replicaInfo -> ctx.currentViolation
-                .addReplica(new ReplicaInfoAndErr(replicaInfo)
-                    .withDelta(ctx.clause.tag.delta(r.getVal(ImplicitSnitch.CORES)))));
-          }
-        }
-
-      }
-
-      @Override
-      public void getSuggestions(SuggestionCtx ctx) {
-        if (ctx.violation == null || ctx.violation.replicaCountDelta == 0) return;
-        if (ctx.violation.replicaCountDelta > 0) {//there are more replicas than necessary
-          for (int i = 0; i < Math.abs(ctx.violation.replicaCountDelta); i++) {
-            Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
-                .hint(Suggester.Hint.SRC_NODE, ctx.violation.node);
-            ctx.addSuggestion(suggester);
-          }
-        }
-      }
-
-      @Override
-      public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> ops, boolean strictMode) {
-        cell.val = cell.val == null ? 0 : ((Number) cell.val).longValue() + 1;
-      }
-
-      @Override
-      public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
-        cell.val = cell.val == null ? 0 : ((Number) cell.val).longValue() - 1;
-      }
-    },
-
-    @Meta(name = ImplicitSnitch.SYSLOADAVG,
-        type = Double.class,
-        min = 0,
-        max = 100,
-        isNodeSpecificVal = true)
-    SYSLOADAVG(),
-
-    @Meta(name = ImplicitSnitch.HEAPUSAGE,
-        type = Double.class,
-        min = 0,
-        isNodeSpecificVal = true)
-    HEAPUSAGE(),
-    @Meta(name = "NUMBER",
-        type = Long.class,
-        min = 0)
-    NUMBER(),
-
-    @Meta(name = "STRING",
-        type = String.class,
-        wildCards = Policy.EACH,
-        supportArrayVals = true)
-    STRING(),
-
-    @Meta(name = "node",
-        type = String.class,
-        isNodeSpecificVal = true,
-        wildCards = {Policy.ANY, Policy.EACH},
-        supportArrayVals = true)
-    NODE() {
-      @Override
-      public void getSuggestions(SuggestionCtx ctx) {
-        if (ctx.violation == null || ctx.violation.replicaCountDelta == 0) return;
-        if (ctx.violation.replicaCountDelta > 0) {//there are more replicas than necessary
-          for (int i = 0; i < Math.abs(ctx.violation.replicaCountDelta); i++) {
-            Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)
-                .hint(Suggester.Hint.SRC_NODE, ctx.violation.node)
-                .hint(ctx.violation.shard.equals(ANY) ? Suggester.Hint.COLL : Suggester.Hint.COLL_SHARD,
-                    ctx.violation.shard.equals(ANY) ? ctx.violation.coll : new Pair<>(ctx.violation.coll, ctx.violation.shard));
-            ctx.addSuggestion(suggester);
-          }
-        }
-
-      }
-
-    },
-
-    @Meta(name = "LAZY",
-        type = void.class)
-    LAZY() {
-      @Override
-      public Object validate(String name, Object val, boolean isRuleVal) {
-        return parseString(val);
-      }
-
-      @Override
-      public boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
-        return op.match(parseString(val), parseString(inputVal)) == Clause.TestStatus.PASS;
-      }
-
-      @Override
-      public void getSuggestions(SuggestionCtx ctx) {
-        perNodeSuggestions(ctx);
-      }
-    },
-
-    @Meta(name = ImplicitSnitch.DISKTYPE,
-        type = String.class,
-        enumVals = {"ssd", "rotational"},
-        supportArrayVals = true)
-    DISKTYPE() {
-      @Override
-      public void getSuggestions(SuggestionCtx ctx) {
-        perNodeSuggestions(ctx);
-      }
-
-
-    };
-
-    public final String tagName;
-    public final Class type;
-    public Meta meta;
-
-    public final Set<String> vals;
-    public final Number min;
-    public final Number max;
-    public final Boolean additive;
-    public final Set<String> wildCards;
-    public final String perReplicaValue;
-    public final Set<String> associatedPerNodeValues;
-    public final String metricsAttribute;
-    public final Set<ComputedType> supportedComputedTypes;
-    private final VarType impl;
-
-
-    ConditionType() {
-      try {
-        meta = ConditionType.class.getField(name()).getAnnotation(Meta.class);
-        if (meta == null) {
-          throw new RuntimeException("Invalid type, should have a @Meta annotation " + name());
-        }
-      } catch (NoSuchFieldException e) {
-        //cannot happen
-      }
-      if (meta.implementation() != void.class) {
-        try {
-          impl = (VarType) meta.implementation().newInstance();
-        } catch (Exception e) {
-          throw new RuntimeException("Unable to instantiate: " + meta.implementation().getName());
-        }
-      } else {
-        impl = null;
-      }
-      this.tagName = meta.name();
-      this.type = meta.type();
-
-      this.vals = readSet(meta.enumVals());
-      this.max = readNum(meta.max());
-      this.min = readNum(meta.min());
-      this.perReplicaValue = readStr(meta.associatedPerReplicaValue());
-      this.associatedPerNodeValues = readSet(meta.associatedPerNodeValue());
-      this.additive = meta.isAdditive();
-      this.metricsAttribute = readStr(meta.metricsKey());
-      this.supportedComputedTypes = meta.computedValues()[0] == ComputedType.NULL ?
-          emptySet() :
-          unmodifiableSet(new HashSet(Arrays.asList(meta.computedValues())));
-      this.wildCards = readSet(meta.wildCards());
-
-    }
-
-    public String getTagName() {
-      return meta.name();
-    }
-
-    private String readStr(String s) {
-      return NULL.equals(s) ? null : s;
-    }
-
-    private Number readNum(double v) {
-      return v == -1 ? null :
-          (Number) validate(null, v, true);
-    }
-
-    Set<String> readSet(String[] vals) {
-      if (NULL.equals(vals[0])) return emptySet();
-      return unmodifiableSet(new HashSet<>(Arrays.asList(vals)));
-    }
-
-    @Override
-    public void getSuggestions(SuggestionCtx ctx) {
-      if (impl != null) {
-        impl.getSuggestions(ctx);
-        return;
-      }
-      perNodeSuggestions(ctx);
-    }
-
-    @Override
-    public void addViolatingReplicas(ViolationCtx ctx) {
-      if (impl != null) {
-        impl.addViolatingReplicas(ctx);
-        return;
-      }
-      for (Row row : ctx.allRows) {
-        if (ctx.clause.tag.varType.meta.isNodeSpecificVal() && !row.node.equals(ctx.tagKey)) continue;
-        collectViolatingReplicas(ctx, row);
-      }
-    }
-
-    public Operand getOperand(Operand expected, Object val, ComputedType computedType) {
-      return expected;
-    }
-
-
-    public Object convertVal(Object val) {
-      return val;
-    }
-
-    public String postValidate(Clause.Condition condition) {
-      return null;
-    }
-
-    public Object validate(String name, Object val, boolean isRuleVal) {
-      if (val instanceof Clause.Condition) {
-        Clause.Condition condition = (Clause.Condition) val;
-        val = condition.op.readRuleValue(condition);
-        if (val != condition.val) return val;
-      }
-      if (name == null) name = this.tagName;
-      if (type == Double.class) {
-        Double num = Clause.parseDouble(name, val);
-        if (isRuleVal) {
-          if (min != null)
-            if (Double.compare(num, min.doubleValue()) == -1)
-              throw new RuntimeException(name + ": " + val + " must be greater than " + min);
-          if (max != null)
-            if (Double.compare(num, max.doubleValue()) == 1)
-              throw new RuntimeException(name + ": " + val + " must be less than " + max);
-        }
-        return num;
-      } else if (type == Long.class) {
-        Long num = Clause.parseLong(name, val);
-        if (isRuleVal) {
-          if (min != null)
-            if (num < min.longValue())
-              throw new RuntimeException(name + ": " + val + " must be greater than " + min);
-          if (max != null)
-            if (num > max.longValue())
-              throw new RuntimeException(name + ": " + val + " must be less than " + max);
-        }
-        return num;
-      } else if (type == String.class) {
-        if (isRuleVal && !vals.isEmpty() && !vals.contains(val))
-          throw new RuntimeException(name + ": " + val + " must be one of " + StrUtils.join(vals, ','));
-        return val;
-      } else {
-        throw new RuntimeException("Invalid type ");
-      }
-
-    }
-
-    /**
-     * Simulate a replica addition to a node in the cluster
-     */
-    public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector, boolean strictMode) {
-      if (impl != null) impl.projectAddReplica(cell, ri, opCollector, strictMode);
-    }
-
-    public void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
-      if (impl != null) {
-        impl.projectRemoveReplica(cell, ri, opCollector);
-      }
-    }
-
-    @Override
-    public int compareViolation(Violation v1, Violation v2) {
-      if (impl != null) return impl.compareViolation(v1, v2);
-      if (v2.replicaCountDelta == null || v1.replicaCountDelta == null) return 0;
-      if (Math.abs(v1.replicaCountDelta) == Math.abs(v2.replicaCountDelta)) return 0;
-      return Math.abs(v1.replicaCountDelta) < Math.abs(v2.replicaCountDelta) ? -1 : 1;
-    }
-
-    @Override
-    public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
-      if (impl != null) return impl.computeValue(session, condition, collection, shard, node);
-      return condition.val;
-    }
-
-    @Override
-    public boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
-      if (impl != null) return impl.match(inputVal, op, val, name, row);
-      return op.match(val, validate(name, inputVal, false)) == Clause.TestStatus.PASS;
-    }
-  }
-
-  private static void collectViolatingReplicas(ViolationCtx ctx, Row row) {
-    if (ctx.clause.tag.varType.meta.isNodeSpecificVal()) {
-      row.forEachReplica(replica -> {
-        if (ctx.clause.collection.isPass(replica.getCollection()) && ctx.clause.getShard().isPass(replica.getShard())) {
-          ctx.currentViolation.addReplica(new ReplicaInfoAndErr(replica)
-              .withDelta(ctx.clause.tag.delta(row.getVal(ctx.clause.tag.name))));
-        }
-      });
-    } else {
-      row.forEachReplica(replica -> {
-        if (ctx.clause.replica.isPass(0) && !ctx.clause.tag.isPass(row)) return;
-        if (!ctx.clause.replica.isPass(0) && ctx.clause.tag.isPass(row)) return;
-        if(!ctx.currentViolation.getClause().matchShard(replica.getShard(), ctx.currentViolation.shard)) return;
-        if (!ctx.clause.collection.isPass(ctx.currentViolation.coll) || !ctx.clause.shard.isPass(ctx.currentViolation.shard))
-          return;
-        ctx.currentViolation.addReplica(new ReplicaInfoAndErr(replica).withDelta(ctx.clause.tag.delta(row.getVal(ctx.clause.tag.name))));
-      });
-
-    }
-
-
-  }
-
-  private static int getRelevantReplicasCount(Policy.Session session, Clause.Condition cv, String collection, String shard) {
-    AtomicInteger totalReplicasOfInterest = new AtomicInteger(0);
-    Clause clause = cv.getClause();
-    for (Row row : session.matrix) {
-      row.forEachReplica(replicaInfo -> {
-        if (clause.isMatch(replicaInfo, collection, shard))
-          totalReplicasOfInterest.incrementAndGet();
-      });
-    }
-    return totalReplicasOfInterest.get();
-  }
-
-  static class ViolationCtx {
-    final Function<Clause.Condition, Object> evaluator;
-    String tagKey;
-    Clause clause;
-    ReplicaCount count;
-    Violation currentViolation;
-    List<Row> allRows;
-    List<Violation> allViolations = new ArrayList<>();
-
-    public ViolationCtx(Clause clause, List<Row> allRows, Function<Clause.Condition, Object> evaluator) {
-      this.allRows = allRows;
-      this.clause = clause;
-      this.evaluator = evaluator;
-    }
-
-    public ViolationCtx reset(String tagKey, ReplicaCount count, Violation currentViolation) {
-      this.tagKey = tagKey;
-      this.count = count;
-      this.currentViolation = currentViolation;
-      allViolations.add(currentViolation);
-      this.clause = currentViolation.getClause();
-      return this;
-    }
-  }
 
-  private static void perNodeSuggestions(SuggestionCtx ctx) {
+  static void perNodeSuggestions(Ctx ctx) {
     if (ctx.violation == null) return;
     for (ReplicaInfoAndErr e : ctx.violation.getViolatingReplicas()) {
       Suggester suggester = ctx.session.getSuggester(MOVEREPLICA)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4602e4de/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VarType.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VarType.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VarType.java
deleted file mode 100644
index 00224a9..0000000
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VarType.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.client.solrj.cloud.autoscaling;
-
-import java.util.function.Consumer;
-
-/**
- * A Variable Type used in Autoscaling policy rules
- */
-public interface VarType {
-  boolean match(Object inputVal, Operand op, Object val, String name, Row row);
-
-  void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector, boolean strictMode);
-
-  void addViolatingReplicas(Suggestion.ViolationCtx ctx);
-
-  default void getSuggestions(Suggestion.SuggestionCtx ctx) {
-  }
-
-  default Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
-    return condition.val;
-  }
-
-  int compareViolation(Violation v1, Violation v2);
-
-  default void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
-  }
-}

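For context on this refactor: the deleted VarType interface was the delegation hook consumed by the ConditionType enum constructor shown above, which reads each constant's @Meta annotation and reflectively instantiates meta.implementation(), so that a non-null impl field overrides the enum's shared default behavior. Below is a minimal, self-contained sketch of that pattern; all names (Meta, Behavior, FreeDiskBehavior, Var) are simplified stand-ins for illustration, not the real Solr API.

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

public class VarTypeDelegationSketch {

  @Retention(RetentionPolicy.RUNTIME)
  @interface Meta {
    String name();
    Class<?> implementation() default void.class;
  }

  /** stand-in for the real VarType hook */
  interface Behavior {
    String suggest();
  }

  static class FreeDiskBehavior implements Behavior {
    @Override public String suggest() { return "move the smallest cores off the node"; }
  }

  enum Var {
    @Meta(name = "freedisk", implementation = FreeDiskBehavior.class)
    FREEDISK,
    @Meta(name = "cores")
    CORES;

    final String tagName;
    final Behavior impl; // null means "use the enum's default behavior"

    Var() {
      try {
        // read this constant's @Meta annotation, as ConditionType() does
        Meta meta = Var.class.getField(name()).getAnnotation(Meta.class);
        this.tagName = meta.name();
        this.impl = meta.implementation() == void.class ? null
            : (Behavior) meta.implementation().getDeclaredConstructor().newInstance();
      } catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
      }
    }

    String suggest() {
      // delegate to the per-constant implementation when present
      return impl != null ? impl.suggest() : "per-node default suggestion";
    }
  }

  public static void main(String[] args) {
    for (Var v : Var.values()) {
      System.out.println(v.tagName + " -> " + v.suggest());
    }
  }
}

Run as-is, this prints one line per constant: FREEDISK uses its reflectively created implementation, CORES falls back to the shared default, which is exactly the impl-null check repeated in the overridden methods above.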

[36/48] lucene-solr:jira/http2: LUCENE-8443: Mute failing test temporarily

Posted by da...@apache.org.
LUCENE-8443: Mute failing test temporarily


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1af7686c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1af7686c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1af7686c

Branch: refs/heads/jira/http2
Commit: 1af7686cb6a8b56db508c9870f35e48fe5e1b281
Parents: 6afd3d1
Author: Jim Ferenczi <ji...@apache.org>
Authored: Fri Aug 3 12:38:36 2018 +0200
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Fri Aug 3 12:38:36 2018 +0200

----------------------------------------------------------------------
 .../org/apache/lucene/document/TestLatLonPointShapeQueries.java | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1af7686c/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
index e98cb73..62e4cdf 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
@@ -63,4 +63,9 @@ public class TestLatLonPointShapeQueries extends BaseLatLonShapeTestCase {
       return poly2d.relateTriangle(lon, lat, lon, lat, lon, lat) != Relation.CELL_OUTSIDE_QUERY;
     }
   }
+
+  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-8443")
+  @Override
+  public void testRandomTiny() throws Exception {
+  }
 }

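A note on the muting pattern used here: overriding the inherited randomized test with an empty body, annotated @AwaitsFix, skips just that one case while the rest of the base class's tests keep running. A hedged sketch of the pattern follows; the base class name and issue URL are placeholders (the @AwaitsFix annotation is in scope because the base class ultimately extends LuceneTestCase).

public class TestSomeShapeQueries extends BaseSomeShapeTestCase {

  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-0000")
  @Override
  public void testRandomTiny() throws Exception {
    // intentionally empty: muted until the linked issue is resolved
  }
}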

[23/48] lucene-solr:jira/http2: Make the nightly test smaller.

Posted by da...@apache.org.
Make the nightly test smaller.

Cherry-picked from 3203e99d8fbcaac3458fcf882d4ec229f97dfa43.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c6e0c287
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c6e0c287
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c6e0c287

Branch: refs/heads/jira/http2
Commit: c6e0c2875db0f96fae841ee2b9d91dc0a38d7636
Parents: e56c872
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 16:24:59 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 16:24:59 2018 +0200

----------------------------------------------------------------------
 .../test/org/apache/lucene/document/TestLatLonShapeQueries.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c6e0c287/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
index 03941b9..2bb207e 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShapeQueries.java
@@ -104,7 +104,7 @@ public class TestLatLonShapeQueries extends LuceneTestCase {
 
   @Nightly
   public void testRandomBig() throws Exception {
-    doTestRandom(200000);
+    doTestRandom(50000);
   }
 
   private void doTestRandom(int count) throws Exception {


[47/48] lucene-solr:jira/http2: LUCENE-8060: Fix test bug.

Posted by da...@apache.org.
LUCENE-8060: Fix test bug.

Should use IndexSearcher#count to get accurate counts.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3b15be37
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3b15be37
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3b15be37

Branch: refs/heads/jira/http2
Commit: 3b15be378101ddd1e6f4529f6b02694128fb9ae4
Parents: fb7fce8
Author: Adrien Grand <jp...@gmail.com>
Authored: Sun Aug 5 22:41:02 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Sun Aug 5 22:41:02 2018 +0200

----------------------------------------------------------------------
 .../apache/lucene/index/TestIndexWriterReader.java   | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b15be37/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
index de05b20..f5bf9c0 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
@@ -32,7 +32,6 @@ import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
@@ -700,7 +699,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     assertEquals(100, r.numDocs());
     Query q = new TermQuery(new Term("indexname", "test"));
     IndexSearcher searcher = newSearcher(r);
-    assertEquals(100, searcher.search(q, 10).totalHits.value);
+    assertEquals(100, searcher.count(q));
 
     expectThrows(AlreadyClosedException.class, () -> {
       DirectoryReader.openIfChanged(r);
@@ -768,7 +767,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
         r = r2;
         Query q = new TermQuery(new Term("indexname", "test"));
         IndexSearcher searcher = newSearcher(r);
-        final long count = searcher.search(q, 10).totalHits.value;
+        final long count = searcher.count(q);
         assertTrue(count >= lastCount);
         lastCount = count;
       }
@@ -785,7 +784,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     }
     Query q = new TermQuery(new Term("indexname", "test"));
     IndexSearcher searcher = newSearcher(r);
-    final long count = searcher.search(q, 10).totalHits.value;
+    final long count = searcher.count(q);
     assertTrue(count >= lastCount);
 
     assertEquals(0, excs.size());
@@ -865,7 +864,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
         r = r2;
         Query q = new TermQuery(new Term("indexname", "test"));
         IndexSearcher searcher = newSearcher(r);
-        sum += searcher.search(q, 10).totalHits.value;
+        sum += searcher.count(q);
       }
     }
 
@@ -880,7 +879,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     }
     Query q = new TermQuery(new Term("indexname", "test"));
     IndexSearcher searcher = newSearcher(r);
-    sum += searcher.search(q, 10).totalHits.value;
+    sum += searcher.count(q);
     assertTrue("no documents found at all", sum > 0);
 
     assertEquals(0, excs.size());
@@ -965,8 +964,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
            .setReaderPooling(true)
            .setMergedSegmentWarmer((r) -> {
               IndexSearcher s = newSearcher(r);
-              TopDocs hits = s.search(new TermQuery(new Term("foo", "bar")), 10);
-              assertEquals(20, hits.totalHits.value);
+              int count = s.count(new TermQuery(new Term("foo", "bar")));
+              assertEquals(20, count);
               didWarm.set(true);
            })
            .setMergePolicy(newLogMergePolicy(10))

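The reasoning behind this switch: since LUCENE-8060, TopDocs.totalHits may be a lower bound once enough competitive hits have been collected, so asserting on totalHits.value is no longer safe, whereas IndexSearcher#count always visits every match and returns the exact number. A small sketch of the distinction, assuming an already-constructed IndexSearcher:

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;

final class ExactCountExample {
  static long exactCount(IndexSearcher searcher) throws IOException {
    Query q = new TermQuery(new Term("indexname", "test"));
    TopDocs top = searcher.search(q, 10);
    if (top.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
      // the reported total is only a lower bound here; don't assert on it
    }
    return searcher.count(q); // exact, and cheaper: no scoring or top-k ranking
  }
}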

[45/48] lucene-solr:jira/http2: SOLR-12592: added javadoc

Posted by da...@apache.org.
SOLR-12592: added javadoc


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ffedb99d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ffedb99d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ffedb99d

Branch: refs/heads/jira/http2
Commit: ffedb99dc8127e14ad32c6c1345bb7d4516a570e
Parents: 592899a
Author: Noble Paul <no...@apache.org>
Authored: Sun Aug 5 11:56:22 2018 +1000
Committer: Noble Paul <no...@apache.org>
Committed: Sun Aug 5 11:56:22 2018 +1000

----------------------------------------------------------------------
 .../apache/solr/client/solrj/cloud/autoscaling/SealedClause.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffedb99d/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SealedClause.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SealedClause.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SealedClause.java
index 5a40b42..495bcb4 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SealedClause.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SealedClause.java
@@ -20,7 +20,7 @@ package org.apache.solr.client.solrj.cloud.autoscaling;
 import java.util.function.Function;
 
 /**
- * This clause is an instance with no conditions with computed value
+ * This clause is an instance with no conditions with computed value; every value is computed just in time.
  */
 public class SealedClause extends Clause {
   SealedClause(Clause clause, Function<Condition, Object> computedValueEvaluator) {


[33/48] lucene-solr:jira/http2: SOLR-12509: Fix a bug when using round-robin doc assignment.

Posted by da...@apache.org.
SOLR-12509: Fix a bug when using round-robin doc assignment.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b5ed6350
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b5ed6350
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b5ed6350

Branch: refs/heads/jira/http2
Commit: b5ed6350a0ea444553242ef2b141161c0fc3830b
Parents: d1173b8
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Aug 2 21:10:01 2018 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Aug 2 21:17:15 2018 +0200

----------------------------------------------------------------------
 .../apache/solr/update/SolrIndexSplitter.java   | 27 ++++++++++++--------
 .../solr/update/SolrIndexSplitterTest.java      | 10 +++++---
 2 files changed, 23 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b5ed6350/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
index 75234fa..334a29d 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
@@ -26,6 +26,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.FilterCodecReader;
@@ -212,11 +213,14 @@ public class SolrIndexSplitter {
     log.info("SolrIndexSplitter: partitions=" + numPieces + " segments=" + leaves.size());
     RTimerTree t;
 
+    // this tracks round-robin assignment of docs to partitions
+    AtomicInteger currentPartition = new AtomicInteger();
+
     if (splitMethod != SplitMethod.LINK) {
       t = timings.sub("findDocSetsPerLeaf");
       for (LeafReaderContext readerContext : leaves) {
         assert readerContext.ordInParent == segmentDocSets.size();  // make sure we're going in order
-        FixedBitSet[] docSets = split(readerContext, numPieces, field, rangesArr, splitKey, hashRouter, false);
+        FixedBitSet[] docSets = split(readerContext, numPieces, field, rangesArr, splitKey, hashRouter, currentPartition, false);
         segmentDocSets.add(docSets);
       }
       t.stop();
@@ -295,7 +299,7 @@ public class SolrIndexSplitter {
           t.resume();
           // apply deletions specific to this partition. As a side-effect on the first call this also populates
           // a cache of docsets to delete per leaf reader per partition, which is reused for subsequent partitions.
-          iw.deleteDocuments(new SplittingQuery(partitionNumber, field, rangesArr, hashRouter, splitKey, docsToDeleteCache));
+          iw.deleteDocuments(new SplittingQuery(partitionNumber, field, rangesArr, hashRouter, splitKey, docsToDeleteCache, currentPartition));
           t.pause();
         } else {
           // This removes deletions but optimize might still be needed because sub-shards will have the same number of segments as the parent shard.
@@ -433,15 +437,17 @@ public class SolrIndexSplitter {
     private final HashBasedRouter hashRouter;
     private final String splitKey;
     private final Map<IndexReader.CacheKey, FixedBitSet[]> docsToDelete;
+    private final AtomicInteger currentPartition;
 
     SplittingQuery(int partition, SchemaField field, DocRouter.Range[] rangesArr, HashBasedRouter hashRouter, String splitKey,
-                   Map<IndexReader.CacheKey, FixedBitSet[]> docsToDelete) {
+                   Map<IndexReader.CacheKey, FixedBitSet[]> docsToDelete, AtomicInteger currentPartition) {
       this.partition = partition;
       this.field = field;
       this.rangesArr = rangesArr;
       this.hashRouter = hashRouter;
       this.splitKey = splitKey;
       this.docsToDelete = docsToDelete;
+      this.currentPartition = currentPartition;
     }
 
     @Override
@@ -493,7 +499,7 @@ public class SolrIndexSplitter {
           return perPartition[partition];
         }
 
-        perPartition = split(readerContext, numPieces, field, rangesArr, splitKey, hashRouter, true);
+        perPartition = split(readerContext, numPieces, field, rangesArr, splitKey, hashRouter, currentPartition, true);
         docsToDelete.put(readerContext.reader().getCoreCacheHelper().getKey(), perPartition);
         return perPartition[partition];
       }
@@ -526,7 +532,7 @@ public class SolrIndexSplitter {
   }
 
   static FixedBitSet[] split(LeafReaderContext readerContext, int numPieces, SchemaField field, DocRouter.Range[] rangesArr,
-                             String splitKey, HashBasedRouter hashRouter, boolean delete) throws IOException {
+                             String splitKey, HashBasedRouter hashRouter, AtomicInteger currentPartition, boolean delete) throws IOException {
     LeafReader reader = readerContext.reader();
     FixedBitSet[] docSets = new FixedBitSet[numPieces];
     for (int i=0; i<docSets.length; i++) {
@@ -556,7 +562,6 @@ public class SolrIndexSplitter {
       docsMatchingRanges = new int[rangesArr.length+1];
     }
 
-    int partition = 0;
     CharsRefBuilder idRef = new CharsRefBuilder();
     for (;;) {
       term = termsEnum.next();
@@ -580,7 +585,7 @@ public class SolrIndexSplitter {
       }
 
       int hash = 0;
-      if (hashRouter != null) {
+      if (hashRouter != null && rangesArr != null) {
         hash = hashRouter.sliceHash(idString, null, null, null);
       }
 
@@ -591,14 +596,14 @@ public class SolrIndexSplitter {
         if (doc == DocIdSetIterator.NO_MORE_DOCS) break;
         if (rangesArr == null) {
           if (delete) {
-            docSets[partition].clear(doc);
+            docSets[currentPartition.get()].clear(doc);
           } else {
-            docSets[partition].set(doc);
+            docSets[currentPartition.get()].set(doc);
           }
-          partition = (partition + 1) % numPieces;
+          currentPartition.set((currentPartition.get() + 1) % numPieces);
         } else  {
           int matchingRangesCount = 0;
-          for (int i=0; i<rangesArr.length; i++) {      // inner-loop: use array here for extra speed.
+          for (int i=0; i < rangesArr.length; i++) {      // inner-loop: use array here for extra speed.
             if (rangesArr[i].includes(hash)) {
               if (delete) {
                 docSets[i].clear(doc);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b5ed6350/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
index 79eccd9..0bcc851 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
@@ -269,22 +269,26 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
       directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
       DirectoryReader reader = DirectoryReader.open(directory);
-      assertEquals("split index1 has wrong number of documents", max / 3, reader.numDocs());
+      int numDocs1 = reader.numDocs();
       reader.close();
       h.getCore().getDirectoryFactory().release(directory);
       directory = h.getCore().getDirectoryFactory().get(indexDir2.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
       reader = DirectoryReader.open(directory);
-      assertEquals("split index2 has wrong number of documents", max / 3, reader.numDocs());
+      int numDocs2 = reader.numDocs();
       reader.close();
       h.getCore().getDirectoryFactory().release(directory);
       directory = h.getCore().getDirectoryFactory().get(indexDir3.getAbsolutePath(),
           DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
       reader = DirectoryReader.open(directory);
-      assertEquals("split index3 has wrong number of documents", max / 3, reader.numDocs());
+      int numDocs3 = reader.numDocs();
       reader.close();
       h.getCore().getDirectoryFactory().release(directory);
       directory = null;
+      assertEquals("split indexes lost some documents!", max, numDocs1 + numDocs2 + numDocs3);
+      assertEquals("split index1 has wrong number of documents", max / 3, numDocs1);
+      assertEquals("split index2 has wrong number of documents", max / 3, numDocs2);
+      assertEquals("split index3 has wrong number of documents", max / 3, numDocs3);
     } finally {
       if (request != null) request.close(); // decrefs the searcher
       if (directory != null)  {

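The root cause, visible in the hunks above: the round-robin cursor was a local `int partition = 0`, reset at the start of every segment and of every per-partition delete pass, so order-based (hash-less) splits assigned documents inconsistently between the split and delete passes and the per-index counts drifted. Threading a single AtomicInteger through split() and SplittingQuery keeps one cursor across all segments and both passes. A simplified sketch of the corrected assignment, with hypothetical names:

import java.util.concurrent.atomic.AtomicInteger;

// One shared cursor for the whole split, not one per segment.
final class RoundRobinAssigner {
  private final int numPieces;
  private final AtomicInteger cursor; // shared across segments and passes

  RoundRobinAssigner(int numPieces, AtomicInteger cursor) {
    this.numPieces = numPieces;
    this.cursor = cursor;
  }

  // Returns the partition for the next document and advances the cursor,
  // mirroring the docSets[currentPartition.get()] access and the
  // set((p + 1) % numPieces) update in the patch.
  int nextPartition() {
    int p = cursor.get();
    cursor.set((p + 1) % numPieces);
    return p;
  }
}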

[12/48] lucene-solr:jira/http2: LUCENE-8391: Make TestUpgradeIndexMergePolicy faster when merging a TieredMergePolicy.

Posted by da...@apache.org.
LUCENE-8391: Make TestUpgradeIndexMergePolicy faster when merging a TieredMergePolicy.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/96e985a3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/96e985a3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/96e985a3

Branch: refs/heads/jira/http2
Commit: 96e985a3483f10537ea835a339f89dd10839dae3
Parents: 1a87087
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Jul 31 21:31:56 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Jul 31 21:33:11 2018 +0200

----------------------------------------------------------------------
 .../org/apache/lucene/index/TestUpgradeIndexMergePolicy.java | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/96e985a3/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
index a2c6600..6df5cc8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
@@ -19,11 +19,17 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.index.MergePolicy.MergeSpecification;
+import org.apache.lucene.util.TestUtil;
 
 public class TestUpgradeIndexMergePolicy extends BaseMergePolicyTestCase {
 
   public MergePolicy mergePolicy() {
-    return new UpgradeIndexMergePolicy(newMergePolicy(random()));
+    MergePolicy in = newMergePolicy(random());
+    if (in instanceof TieredMergePolicy) {
+      // Avoid low values of the max merged segment size which prevent this merge policy from scaling well
+      ((TieredMergePolicy) in).setMaxMergedSegmentMB(TestUtil.nextInt(random(), 1024, 10 * 1024));
+    }
+    return new UpgradeIndexMergePolicy(in);
   }
 
   @Override


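The reasoning in the inline comment, spelled out: UpgradeIndexMergePolicy asks the wrapped policy to merge together all segments that still need upgrading, so a randomly tiny TieredMergePolicy max merged segment size forces many small merge rounds and slow tests; a generous cap lets the upgrade finish in a few large merges. A sketch of the equivalent configuration (helper name hypothetical):

import java.util.Random;

import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.index.UpgradeIndexMergePolicy;

final class UpgradePolicyExample {
  static MergePolicy upgradePolicy(Random random) {
    TieredMergePolicy tiered = new TieredMergePolicy();
    // pick a generous cap, as the test now does (roughly 1-10 GB)
    tiered.setMaxMergedSegmentMB(1024 + random.nextInt(9 * 1024 + 1));
    return new UpgradeIndexMergePolicy(tiered);
  }
}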
[13/48] lucene-solr:jira/http2: SOLR-11807: Simplify testing of createNodeSet when restoring a collection and fix the test failure

Posted by da...@apache.org.
SOLR-11807: Simplify testing of createNodeSet when restoring a collection and fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c9e3c456
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c9e3c456
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c9e3c456

Branch: refs/heads/jira/http2
Commit: c9e3c456e304522c0c37950d520c19c1565625f7
Parents: 96e985a
Author: Varun Thacker <va...@apache.org>
Authored: Tue Jul 31 14:54:50 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Tue Jul 31 15:19:23 2018 -0700

----------------------------------------------------------------------
 .../AbstractCloudBackupRestoreTestCase.java       | 18 ++++--------------
 1 file changed, 4 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9e3c456/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java
index a78be03..17089b8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java
@@ -20,7 +20,6 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -30,7 +29,6 @@ import java.util.TreeMap;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -294,20 +292,12 @@ public abstract class AbstractCloudBackupRestoreTestCase extends SolrCloudTestCa
     }
 
     if (rarely()) { // Try with createNodeSet configuration
-      List<String> nodeStrs = new ArrayList<>(1);//Always 1 as cluster.getJettySolrRunners().size()=NUM_SHARDS=2
-      Iterator<JettySolrRunner> iter = cluster.getJettySolrRunners().iterator();
-      nodeStrs.add(iter.next().getNodeName());
-      restore.setCreateNodeSet(String.join(",", nodeStrs));
-      restore.setCreateNodeSetShuffle(usually());
+      //Always 1 as cluster.getJettySolrRunners().size()=NUM_SHARDS=2
+      restore.setCreateNodeSet(cluster.getJettySolrRunners().get(0).getNodeName());
       // we need to double maxShardsPerNode value since we reduced number of available nodes by half.
       isMaxShardsPerNodeExternal = true;
-      if (restore.getMaxShardsPerNode() != null) {
-        computeRestoreMaxShardsPerNode = restore.getMaxShardsPerNode() * 2;
-        restore.setMaxShardsPerNode(computeRestoreMaxShardsPerNode);
-      } else {
-        computeRestoreMaxShardsPerNode = origShardToDocCount.size() * backupReplFactor;
-        restore.setMaxShardsPerNode(origShardToDocCount.size() * backupReplFactor);
-      }
+      computeRestoreMaxShardsPerNode = origShardToDocCount.size() * restoreReplFactor;
+      restore.setMaxShardsPerNode(computeRestoreMaxShardsPerNode);
     }
 
     final int restoreMaxShardsPerNode = computeRestoreMaxShardsPerNode;


[34/48] lucene-solr:jira/http2: LUCENE-8440: Add support for indexing and searching Line and Point shapes using LatLonShape encoding

Posted by da...@apache.org.
LUCENE-8440: Add support for indexing and searching Line and Point shapes using LatLonShape encoding


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a0e33a9b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a0e33a9b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a0e33a9b

Branch: refs/heads/jira/http2
Commit: a0e33a9bc84179c9f17b30706a567bdf137194d1
Parents: b5ed635
Author: Nicholas Knize <nk...@gmail.com>
Authored: Tue Jul 31 17:45:12 2018 -0500
Committer: Nicholas Knize <nk...@gmail.com>
Committed: Thu Aug 2 17:17:37 2018 -0500

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   2 +
 .../src/java/org/apache/lucene/geo/Polygon.java |   2 +-
 .../org/apache/lucene/document/LatLonShape.java |  67 ++-
 .../src/java/org/apache/lucene/geo/Line.java    | 139 ++++++
 .../document/BaseLatLonShapeTestCase.java       | 458 +++++++++++++++++++
 .../document/TestLatLonLineShapeQueries.java    |  94 ++++
 .../document/TestLatLonPointShapeQueries.java   |  66 +++
 .../document/TestLatLonPolygonShapeQueries.java | 385 ++--------------
 .../apache/lucene/document/TestLatLonShape.java |  31 +-
 9 files changed, 885 insertions(+), 359 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 9b9bcc8..7e261bc 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -213,6 +213,8 @@ Changes in Runtime Behavior:
 
 Improvements
 
+* LUCENE-8440: Add support for indexing and searching Line and Point shapes using LatLonShape encoding (Nick Knize)
+
 * LUCENE-8435: Add new LatLonShapePolygonQuery for querying indexed LatLonShape fields by arbitrary polygons (Nick Knize)
 
 * LUCENE-8367: Make per-dimension drill down optional for each facet dimension (Mike McCandless)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Polygon.java b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
index 5e14286..a6d7e9d 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
@@ -202,7 +202,7 @@ public final class Polygon {
     return sb.toString();
   }
 
-  private String verticesToGeoJSON(final double[] lats, final double[] lons) {
+  public static String verticesToGeoJSON(final double[] lats, final double[] lons) {
     StringBuilder sb = new StringBuilder();
     sb.append('[');
     for (int i = 0; i < lats.length; i++) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
index 28c95e4..01a31ad 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
@@ -19,6 +19,7 @@ package org.apache.lucene.document;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.lucene.geo.Line;
 import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.geo.Tessellator;
 import org.apache.lucene.geo.Tessellator.Triangle;
@@ -27,6 +28,9 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
+
 /**
  * An indexed shape utility class.
  * <p>
@@ -62,16 +66,67 @@ public class LatLonShape {
   private LatLonShape() {
   }
 
-  /** the lionshare of the indexing is done by the tessellator */
+  /** create indexable fields for polygon geometry */
   public static Field[] createIndexableFields(String fieldName, Polygon polygon) {
+    // the lion's share of the indexing is done by the tessellator
     List<Triangle> tessellation = Tessellator.tessellate(polygon);
     List<LatLonTriangle> fields = new ArrayList<>();
-    for (int i = 0; i < tessellation.size(); ++i) {
-      fields.add(new LatLonTriangle(fieldName, tessellation.get(i)));
+    for (Triangle t : tessellation) {
+      fields.add(new LatLonTriangle(fieldName, t.getEncodedX(0), t.getEncodedY(0),
+          t.getEncodedX(1), t.getEncodedY(1), t.getEncodedX(2), t.getEncodedY(2)));
     }
     return fields.toArray(new Field[fields.size()]);
   }
 
+  /** create indexable fields for line geometry */
+  public static Field[] createIndexableFields(String fieldName, Line line) {
+    int numPoints = line.numPoints();
+    List<LatLonTriangle> fields = new ArrayList<>(numPoints - 1);
+
+    // encode the line vertices
+    int[] encodedLats = new int[numPoints];
+    int[] encodedLons = new int[numPoints];
+    for (int i = 0; i < numPoints; ++i) {
+      encodedLats[i] = encodeLatitude(line.getLat(i));
+      encodedLons[i] = encodeLongitude(line.getLon(i));
+    }
+
+    // create "flat" triangles
+    int aLat, bLat, aLon, bLon, temp;
+    for (int i = 0, j = 1; j < numPoints; ++i, ++j) {
+      aLat = encodedLats[i];
+      aLon = encodedLons[i];
+      bLat = encodedLats[j];
+      bLon = encodedLons[j];
+      if (aLat > bLat) {
+        temp = aLat;
+        aLat = bLat;
+        bLat = temp;
+        temp = aLon;
+        aLon = bLon;
+        bLon = temp;
+      } else if (aLat == bLat) {
+        if (aLon > bLon) {
+          temp = aLat;
+          aLat = bLat;
+          bLat = temp;
+          temp = aLon;
+          aLon = bLon;
+          bLon = temp;
+        }
+      }
+      fields.add(new LatLonTriangle(fieldName, aLon, aLat, bLon, bLat, aLon, aLat));
+    }
+    return fields.toArray(new Field[fields.size()]);
+  }
+
+  /** create indexable fields for point geometry */
+  public static Field[] createIndexableFields(String fieldName, double lat, double lon) {
+    final int encodedLat = encodeLatitude(lat);
+    final int encodedLon = encodeLongitude(lon);
+    return new Field[] {new LatLonTriangle(fieldName, encodedLon, encodedLat, encodedLon, encodedLat, encodedLon, encodedLat)};
+  }
+
   /** create a query to find all polygons that intersect a defined bounding box
    *  note: does not currently support dateline crossing boxes
    * todo split dateline crossing boxes into two queries like {@link LatLonPoint#newBoxQuery}
@@ -89,11 +144,9 @@ public class LatLonShape {
    */
   private static class LatLonTriangle extends Field {
 
-    public LatLonTriangle(String name, Triangle t) {
+    LatLonTriangle(String name, int ax, int ay, int bx, int by, int cx, int cy) {
       super(name, TYPE);
-      setTriangleValue(t.getEncodedX(0), t.getEncodedY(0),
-                       t.getEncodedX(1), t.getEncodedY(1),
-                       t.getEncodedX(2), t.getEncodedY(2));
+      setTriangleValue(ax, ay, bx, by, cx, cy);
     }
 
     public void setTriangleValue(int aX, int aY, int bX, int bY, int cX, int cY) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java
new file mode 100644
index 0000000..c7e626d
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.geo;
+
+import java.util.Arrays;
+
+/**
+ * Represents a line on the earth's surface.  You can construct the Line directly with {@code double[]}
+ * coordinates.
+ * <p>
+ * NOTES:
+ * <ol>
+ *   <li>All latitude/longitude values must be in decimal degrees.
+ *   <li>For more advanced GeoSpatial indexing and query operations see the {@code spatial-extras} module
+ * </ol>
+ * @lucene.experimental
+ */
+public class Line {
+  /** array of latitude coordinates */
+  private final double[] lats;
+  /** array of longitude coordinates */
+  private final double[] lons;
+
+  /** minimum latitude of this line's bounding box */
+  public final double minLat;
+  /** maximum latitude of this line's bounding box */
+  public final double maxLat;
+  /** minimum longitude of this line's bounding box */
+  public final double minLon;
+  /** maximum longitude of this line's bounding box */
+  public final double maxLon;
+
+  /**
+   * Creates a new Line from the supplied latitude/longitude array.
+   */
+  public Line(double[] lats, double[] lons) {
+    if (lats == null) {
+      throw new IllegalArgumentException("lats must not be null");
+    }
+    if (lons == null) {
+      throw new IllegalArgumentException("lons must not be null");
+    }
+    if (lats.length != lons.length) {
+      throw new IllegalArgumentException("lats and lons must be equal length");
+    }
+    if (lats.length < 2) {
+      throw new IllegalArgumentException("at least 2 line points required");
+    }
+
+    // compute bounding box
+    double minLat = lats[0];
+    double minLon = lons[0];
+    double maxLat = lats[0];
+    double maxLon = lons[0];
+    for (int i = 0; i < lats.length; ++i) {
+      GeoUtils.checkLatitude(lats[i]);
+      GeoUtils.checkLongitude(lons[i]);
+      minLat = Math.min(lats[i], minLat);
+      minLon = Math.min(lons[i], minLon);
+      maxLat = Math.max(lats[i], maxLat);
+      maxLon = Math.max(lons[i], maxLon);
+    }
+
+    this.lats = lats.clone();
+    this.lons = lons.clone();
+    this.minLat = minLat;
+    this.maxLat = maxLat;
+    this.minLon = minLon;
+    this.maxLon = maxLon;
+  }
+
+  /** returns the number of vertex points */
+  public int numPoints() {
+    return lats.length;
+  }
+
+  /** Returns latitude value at given index */
+  public double getLat(int vertex) {
+    return lats[vertex];
+  }
+
+  /** Returns longitude value at given index */
+  public double getLon(int vertex) {
+    return lons[vertex];
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (!(o instanceof Line)) return false;
+    Line line = (Line) o;
+    return Arrays.equals(lats, line.lats) && Arrays.equals(lons, line.lons);
+  }
+
+  @Override
+  public int hashCode() {
+    int result = Arrays.hashCode(lats);
+    result = 31 * result + Arrays.hashCode(lons);
+    return result;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("LINE(");
+    for (int i = 0; i < lats.length; i++) {
+      sb.append(i > 0 ? ", [" : "[")
+          .append(lats[i])
+          .append(", ")
+          .append(lons[i])
+          .append("]");
+    }
+    sb.append(')');
+    return sb.toString();
+  }
+
+  /** prints the line as geojson */
+  public String toGeoJSON() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("[");
+    sb.append(Polygon.verticesToGeoJSON(lats, lons));
+    sb.append("]");
+    return sb.toString();
+  }
+}
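
[Editor's sketch, not part of the commit: a minimal usage example of the new Line class, assuming an already-open IndexWriter named 'writer' and a hypothetical field name "shape"; it uses only the Line constructor above and the LatLonShape factory methods visible elsewhere in this commit.]

  double[] lats = new double[] {0.0, 1.0, 2.0};
  double[] lons = new double[] {10.0, 11.0, 12.0};
  Line line = new Line(lats, lons);       // validates equal lengths, >= 2 points, and coordinate ranges
  Document doc = new Document();
  for (Field f : LatLonShape.createIndexableFields("shape", line)) {
    doc.add(f);                           // add each generated shape field to the document
  }
  writer.addDocument(doc);                // 'writer' is an assumed IndexWriter
  Query q = LatLonShape.newBoxQuery("shape", -1d, 3d, 9d, 13d);  // bbox that intersects the line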

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
new file mode 100644
index 0000000..3321f9a
--- /dev/null
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
@@ -0,0 +1,458 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import org.apache.lucene.geo.GeoTestUtil;
+import org.apache.lucene.geo.Line;
+import org.apache.lucene.geo.Polygon;
+import org.apache.lucene.geo.Polygon2D;
+import org.apache.lucene.geo.Rectangle;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
+
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
+import static org.apache.lucene.geo.GeoTestUtil.nextLatitude;
+import static org.apache.lucene.geo.GeoTestUtil.nextLongitude;
+
+public abstract class BaseLatLonShapeTestCase extends LuceneTestCase {
+
+  protected static final String FIELD_NAME = "shape";
+
+  protected abstract ShapeType getShapeType();
+
+  protected Object nextShape() {
+    return getShapeType().nextShape();
+  }
+
+  protected double quantizeLat(double rawLat) {
+    return decodeLatitude(encodeLatitude(rawLat));
+  }
+
+  protected double quantizeLatCeil(double rawLat) {
+    return decodeLatitude(encodeLatitudeCeil(rawLat));
+  }
+
+  protected double quantizeLon(double rawLon) {
+    return decodeLongitude(encodeLongitude(rawLon));
+  }
+
+  protected double quantizeLonCeil(double rawLon) {
+    return decodeLongitude(encodeLongitudeCeil(rawLon));
+  }
+
+  protected Polygon quantizePolygon(Polygon polygon) {
+    double[] lats = new double[polygon.numPoints()];
+    double[] lons = new double[polygon.numPoints()];
+    for (int i = 0; i < lats.length; ++i) {
+      lats[i] = quantizeLat(polygon.getPolyLat(i));
+      lons[i] = quantizeLon(polygon.getPolyLon(i));
+    }
+    return new Polygon(lats, lons);
+  }
+
+  protected abstract Field[] createIndexableFields(String field, Object shape);
+
+  private void addShapeToDoc(String field, Document doc, Object shape) {
+    Field[] fields = createIndexableFields(field, shape);
+    for (Field f : fields) {
+      doc.add(f);
+    }
+  }
+
+  protected Query newRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) {
+    return LatLonShape.newBoxQuery(field, minLat, maxLat, minLon, maxLon);
+  }
+
+  protected Query newPolygonQuery(String field, Polygon... polygons) {
+    return LatLonShape.newPolygonQuery(field, polygons);
+  }
+
+  public void testRandomTiny() throws Exception {
+    // Make sure single-leaf-node case is OK:
+    doTestRandom(10);
+  }
+
+  public void testRandomMedium() throws Exception {
+    doTestRandom(10000);
+  }
+
+  @Nightly
+  public void testRandomBig() throws Exception {
+    doTestRandom(50000);
+  }
+
+  private void doTestRandom(int count) throws Exception {
+    int numShapes = atLeast(count);
+    ShapeType type = getShapeType();
+
+    if (VERBOSE) {
+      System.out.println("TEST: number of " + type.name() + " shapes=" + numShapes);
+    }
+
+    Object[] shapes = new Object[numShapes];
+    for (int id = 0; id < numShapes; ++id) {
+      int x = randomInt(20);
+      if (x == 17) {
+        shapes[id] = null;
+        if (VERBOSE) {
+          System.out.println("  id=" + id + " is missing");
+        }
+      } else {
+        // create a new shape
+        shapes[id] = nextShape();
+      }
+    }
+    verify(shapes);
+  }
+
+  private void verify(Object... shapes) throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.setMergeScheduler(new SerialMergeScheduler());
+    int mbd = iwc.getMaxBufferedDocs();
+    if (mbd != -1 && mbd < shapes.length / 100) {
+      iwc.setMaxBufferedDocs(shapes.length / 100);
+    }
+    Directory dir;
+    if (shapes.length > 1000) {
+      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
+    } else {
+      dir = newDirectory();
+    }
+    IndexWriter w = new IndexWriter(dir, iwc);
+
+    // index random polygons
+    indexRandomShapes(w, shapes);
+
+    // query testing
+    final IndexReader reader = DirectoryReader.open(w);
+
+    // test random bbox queries
+    verifyRandomBBoxQueries(reader, shapes);
+    // test random polygon queries
+    verifyRandomPolygonQueries(reader, shapes);
+
+    IOUtils.close(w, reader, dir);
+  }
+
+  protected void indexRandomShapes(IndexWriter w, Object... shapes) throws Exception {
+    Set<Integer> deleted = new HashSet<>();
+    for (int id = 0; id < shapes.length; ++id) {
+      Document doc = new Document();
+      doc.add(newStringField("id", "" + id, Field.Store.NO));
+      doc.add(new NumericDocValuesField("id", id));
+      if (shapes[id] != null) {
+        addShapeToDoc(FIELD_NAME, doc, shapes[id]);
+      }
+      w.addDocument(doc);
+      if (id > 0 && randomInt(100) == 42) {
+        int idToDelete = randomInt(id);
+        w.deleteDocuments(new Term("id", ""+idToDelete));
+        deleted.add(idToDelete);
+        if (VERBOSE) {
+          System.out.println("   delete id=" + idToDelete);
+        }
+      }
+    }
+
+    if (randomBoolean()) {
+      w.forceMerge(1);
+    }
+  }
+
+  protected void verifyRandomBBoxQueries(IndexReader reader, Object... shapes) throws Exception {
+    IndexSearcher s = newSearcher(reader);
+
+    final int iters = atLeast(75);
+
+    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
+    int maxDoc = s.getIndexReader().maxDoc();
+
+    for (int iter = 0; iter < iters; ++iter) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
+      }
+
+      // BBox
+      Rectangle rect;
+      // quantizing the bbox may end up w/ bounding boxes crossing dateline...
+      // TODO: add support for bounding boxes crossing dateline
+      while (true) {
+        rect = GeoTestUtil.nextBoxNotCrossingDateline();
+        if (decodeLongitude(encodeLongitudeCeil(rect.minLon)) <= decodeLongitude(encodeLongitude(rect.maxLon)) &&
+            decodeLatitude(encodeLatitudeCeil(rect.minLat)) <= decodeLatitude(encodeLatitude(rect.maxLat))) {
+          break;
+        }
+      }
+      Query query = newRectQuery(FIELD_NAME, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon);
+
+      if (VERBOSE) {
+        System.out.println("  query=" + query);
+      }
+
+      final FixedBitSet hits = new FixedBitSet(maxDoc);
+      s.search(query, new SimpleCollector() {
+
+        private int docBase;
+
+        @Override
+        public ScoreMode scoreMode() {
+          return ScoreMode.COMPLETE_NO_SCORES;
+        }
+
+        @Override
+        protected void doSetNextReader(LeafReaderContext context) throws IOException {
+          docBase = context.docBase;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+          hits.set(docBase+doc);
+        }
+      });
+
+      boolean fail = false;
+      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
+      for (int docID = 0; docID < maxDoc; ++docID) {
+        assertEquals(docID, docIDToID.nextDoc());
+        int id = (int) docIDToID.longValue();
+        boolean expected;
+        if (liveDocs != null && liveDocs.get(docID) == false) {
+          // document is deleted
+          expected = false;
+        } else if (shapes[id] == null) {
+          expected = false;
+        } else {
+          // check quantized poly against quantized query
+          expected = getValidator().testBBoxQuery(quantizeLatCeil(rect.minLat), quantizeLat(rect.maxLat),
+              quantizeLonCeil(rect.minLon), quantizeLon(rect.maxLon), shapes[id]);
+        }
+
+        if (hits.get(docID) != expected) {
+          StringBuilder b = new StringBuilder();
+
+          if (expected) {
+            b.append("FAIL: id=" + id + " should match but did not\n");
+          } else {
+            b.append("FAIL: id=" + id + " should not match but did\n");
+          }
+          b.append("  query=" + query + " docID=" + docID + "\n");
+          b.append("  shape=" + shapes[id] + "\n");
+          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
+          b.append("  rect=Rectangle(" + quantizeLatCeil(rect.minLat) + " TO " + quantizeLat(rect.maxLat) + " lon=" + quantizeLonCeil(rect.minLon) + " TO " + quantizeLon(rect.maxLon) + ")");
+          if (true) {
+            fail("wrong hit (first of possibly more):\n\n" + b);
+          } else {
+            System.out.println(b.toString());
+            fail = true;
+          }
+        }
+      }
+      if (fail) {
+        fail("some hits were wrong");
+      }
+    }
+  }
+
+  protected void verifyRandomPolygonQueries(IndexReader reader, Object... shapes) throws Exception {
+    IndexSearcher s = newSearcher(reader);
+
+    final int iters = atLeast(75);
+
+    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
+    int maxDoc = s.getIndexReader().maxDoc();
+
+    for (int iter = 0; iter < iters; ++iter) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: iter=" + (iter + 1) + " of " + iters + " s=" + s);
+      }
+
+      // Polygon
+      Polygon queryPolygon = GeoTestUtil.nextPolygon();
+      Polygon2D queryPoly2D = Polygon2D.create(queryPolygon);
+      Query query = newPolygonQuery(FIELD_NAME, queryPolygon);
+
+      if (VERBOSE) {
+        System.out.println("  query=" + query);
+      }
+
+      final FixedBitSet hits = new FixedBitSet(maxDoc);
+      s.search(query, new SimpleCollector() {
+
+        private int docBase;
+
+        @Override
+        public ScoreMode scoreMode() {
+          return ScoreMode.COMPLETE_NO_SCORES;
+        }
+
+        @Override
+        protected void doSetNextReader(LeafReaderContext context) throws IOException {
+          docBase = context.docBase;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+          hits.set(docBase+doc);
+        }
+      });
+
+      boolean fail = false;
+      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
+      for (int docID = 0; docID < maxDoc; ++docID) {
+        assertEquals(docID, docIDToID.nextDoc());
+        int id = (int) docIDToID.longValue();
+        boolean expected;
+        if (liveDocs != null && liveDocs.get(docID) == false) {
+          // document is deleted
+          expected = false;
+        } else if (shapes[id] == null) {
+          expected = false;
+        } else {
+          expected = getValidator().testPolygonQuery(queryPoly2D, shapes[id]);
+        }
+
+        if (hits.get(docID) != expected) {
+          StringBuilder b = new StringBuilder();
+
+          if (expected) {
+            b.append("FAIL: id=" + id + " should match but did not\n");
+          } else {
+            b.append("FAIL: id=" + id + " should not match but did\n");
+          }
+          b.append("  query=" + query + " docID=" + docID + "\n");
+          b.append("  shape=" + shapes[id] + "\n");
+          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
+          b.append("  queryPolygon=" + queryPolygon.toGeoJSON());
+          if (true) {
+            fail("wrong hit (first of possibly more):\n\n" + b);
+          } else {
+            System.out.println(b.toString());
+            fail = true;
+          }
+        }
+      }
+      if (fail) {
+        fail("some hits were wrong");
+      }
+    }
+  }
+
+  protected abstract Validator getValidator();
+
+  /** internal point class for testing point shapes */
+  protected static class Point {
+    double lat;
+    double lon;
+
+    public Point(double lat, double lon) {
+      this.lat = lat;
+      this.lon = lon;
+    }
+
+    public String toString() {
+      StringBuilder sb = new StringBuilder();
+      sb.append("POINT(");
+      sb.append(lon);
+      sb.append(',');
+      sb.append(lat).append(')');
+      return sb.toString();
+    }
+  }
+
+  /** internal shape type for testing different shape types */
+  protected enum ShapeType {
+    POINT() {
+      public Point nextShape() {
+        return new Point(nextLatitude(), nextLongitude());
+      }
+    },
+    LINE() {
+      public Line nextShape() {
+        Polygon p = GeoTestUtil.nextPolygon();
+        double[] lats = new double[p.numPoints() - 1];
+        double[] lons = new double[lats.length];
+        for (int i = 0; i < lats.length; ++i) {
+          lats[i] = p.getPolyLat(i);
+          lons[i] = p.getPolyLon(i);
+        }
+        return new Line(lats, lons);
+      }
+    },
+    POLYGON() {
+      public Polygon nextShape() {
+        return GeoTestUtil.nextPolygon();
+      }
+    },
+    MIXED() {
+      public Object nextShape() {
+        return RandomPicks.randomFrom(random(), subList).nextShape();
+      }
+    };
+
+    static ShapeType[] subList;
+    static {
+      subList = new ShapeType[] {POINT, LINE, POLYGON};
+    }
+
+    public abstract Object nextShape();
+
+    static ShapeType fromObject(Object shape) {
+      if (shape instanceof Point) {
+        return POINT;
+      } else if (shape instanceof Line) {
+        return LINE;
+      } else if (shape instanceof Polygon) {
+        return POLYGON;
+      }
+      throw new IllegalArgumentException("invalid shape type from " + shape.toString());
+    }
+  }
+
+  protected interface Validator {
+    boolean testBBoxQuery(double minLat, double maxLat, double minLon, double maxLon, Object shape);
+    boolean testPolygonQuery(Polygon2D poly2d, Object shape);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java
new file mode 100644
index 0000000..21367dc
--- /dev/null
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import org.apache.lucene.geo.Line;
+import org.apache.lucene.geo.Polygon;
+import org.apache.lucene.geo.Polygon2D;
+import org.apache.lucene.index.PointValues.Relation;
+
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
+
+/** random bounding box and polygon query tests for randomly generated {@link Line} types */
+public class TestLatLonLineShapeQueries extends BaseLatLonShapeTestCase {
+
+  protected final LineValidator VALIDATOR = new LineValidator();
+
+  @Override
+  protected ShapeType getShapeType() {
+    return ShapeType.LINE;
+  }
+
+  @Override
+  protected Field[] createIndexableFields(String field, Object line) {
+    return LatLonShape.createIndexableFields(field, (Line)line);
+  }
+
+  @Override
+  protected Validator getValidator() {
+    return VALIDATOR;
+  }
+
+  protected class LineValidator implements Validator {
+    @Override
+    public boolean testBBoxQuery(double minLat, double maxLat, double minLon, double maxLon, Object shape) {
+      // to keep it simple we convert the bbox into a polygon and use poly2d
+      Polygon2D p = Polygon2D.create(new Polygon[] {new Polygon(new double[] {minLat, minLat, maxLat, maxLat, minLat},
+          new double[] {minLon, maxLon, maxLon, minLon, minLon})});
+      return testLine(p, (Line)shape);
+    }
+
+    @Override
+    public boolean testPolygonQuery(Polygon2D poly2d, Object shape) {
+      return testLine(poly2d, (Line) shape);
+    }
+
+    private boolean testLine(Polygon2D queryPoly, Line line) {
+      double ax, ay, bx, by, temp;
+      for (int i = 0, j = 1; j < line.numPoints(); ++i, ++j) {
+        ay = decodeLatitude(encodeLatitude(line.getLat(i)));
+        ax = decodeLongitude(encodeLongitude(line.getLon(i)));
+        by = decodeLatitude(encodeLatitude(line.getLat(j)));
+        bx = decodeLongitude(encodeLongitude(line.getLon(j)));
+        if (ay > by) {
+          temp = ay;
+          ay = by;
+          by = temp;
+          temp = ax;
+          ax = bx;
+          bx = temp;
+        } else if (ay == by) {
+          if (ax > bx) {
+            temp = ay;
+            ay = by;
+            by = temp;
+            temp = ax;
+            ax = bx;
+            bx = temp;
+          }
+        }
+        if (queryPoly.relateTriangle(ax, ay, bx, by, ax, ay) != Relation.CELL_OUTSIDE_QUERY) {
+          return true;
+        }
+      }
+      return false;
+    }
+  }
+}
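
[Editor's note on the endpoint swap in testLine above: each segment's endpoints are reordered into ascending (latitude, then longitude) order before calling relateTriangle, so a segment and its reverse always produce the same degenerate triangle. A compact sketch of the same canonicalization; the helper name is hypothetical.]

  // Returns {ax, ay, bx, by} with the endpoints ordered by latitude, then longitude.
  static double[] canonicalSegment(double ax, double ay, double bx, double by) {
    if (ay > by || (ay == by && ax > bx)) {
      return new double[] {bx, by, ax, ay};   // swap so 'a' is the lower endpoint
    }
    return new double[] {ax, ay, bx, by};
  }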

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
new file mode 100644
index 0000000..e98cb73
--- /dev/null
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import org.apache.lucene.geo.Polygon2D;
+import org.apache.lucene.index.PointValues.Relation;
+
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
+import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
+
+/** random bounding box and polygon query tests for randomly generated {@code latitude, longitude} points */
+public class TestLatLonPointShapeQueries extends BaseLatLonShapeTestCase {
+
+  protected final PointValidator VALIDATOR = new PointValidator();
+
+  @Override
+  protected ShapeType getShapeType() {
+    return ShapeType.POINT;
+  }
+
+  @Override
+  protected Field[] createIndexableFields(String field, Object point) {
+    Point p = (Point)point;
+    return LatLonShape.createIndexableFields(field, p.lat, p.lon);
+  }
+
+  @Override
+  protected Validator getValidator() {
+    return VALIDATOR;
+  }
+
+  protected class PointValidator implements Validator {
+    @Override
+    public boolean testBBoxQuery(double minLat, double maxLat, double minLon, double maxLon, Object shape) {
+      Point p = (Point)shape;
+      double lat = decodeLatitude(encodeLatitude(p.lat));
+      double lon = decodeLongitude(encodeLongitude(p.lon));
+      return (lat < minLat || lat > maxLat || lon < minLon || lon > maxLon) == false;
+    }
+
+    @Override
+    public boolean testPolygonQuery(Polygon2D poly2d, Object shape) {
+      Point p = (Point) shape;
+      double lat = decodeLatitude(encodeLatitude(p.lat));
+      double lon = decodeLongitude(encodeLongitude(p.lon));
+      // for consistency w/ the query we test the point as a triangle
+      return poly2d.relateTriangle(lon, lat, lon, lat, lon, lat) != Relation.CELL_OUTSIDE_QUERY;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
index 25d4888..17eb6e8 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
@@ -16,378 +16,67 @@
  */
 package org.apache.lucene.document;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 
-import org.apache.lucene.geo.GeoTestUtil;
 import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.geo.Polygon2D;
-import org.apache.lucene.geo.Rectangle;
 import org.apache.lucene.geo.Tessellator;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiDocValues;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.PointValues.Relation;
-import org.apache.lucene.index.SerialMergeScheduler;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreMode;
-import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 
-import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
-import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
+public class TestLatLonPolygonShapeQueries extends BaseLatLonShapeTestCase {
 
-/** base Test case for {@link LatLonShape} indexing and search */
-public class TestLatLonPolygonShapeQueries extends LuceneTestCase {
-  protected static final String FIELD_NAME = "shape";
+  protected final PolygonValidator VALIDATOR = new PolygonValidator();
 
-  private Polygon quantizePolygon(Polygon polygon) {
-    double[] lats = new double[polygon.numPoints()];
-    double[] lons = new double[polygon.numPoints()];
-    for (int i = 0; i < lats.length; ++i) {
-      lats[i] = quantizeLat(polygon.getPolyLat(i));
-      lons[i] = quantizeLon(polygon.getPolyLon(i));
-    }
-    return new Polygon(lats, lons);
-  }
-
-  protected double quantizeLat(double rawLat) {
-    return decodeLatitude(encodeLatitude(rawLat));
-  }
-
-  protected double quantizeLatCeil(double rawLat) {
-    return decodeLatitude(encodeLatitudeCeil(rawLat));
-  }
-
-  protected double quantizeLon(double rawLon) {
-    return decodeLongitude(encodeLongitude(rawLon));
-  }
-
-  protected double quantizeLonCeil(double rawLon) {
-    return decodeLongitude(encodeLongitudeCeil(rawLon));
-  }
-
-  protected void addPolygonsToDoc(String field, Document doc, Polygon polygon) {
-    Field[] fields = LatLonShape.createIndexableFields(field, polygon);
-    for (Field f : fields) {
-      doc.add(f);
-    }
-  }
-
-  protected Query newRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) {
-    return LatLonShape.newBoxQuery(field, minLat, maxLat, minLon, maxLon);
-  }
-
-  protected Query newPolygonQuery(String field, Polygon... polygons) {
-    return LatLonShape.newPolygonQuery(field, polygons);
-  }
-
-  public void testRandomTiny() throws Exception {
-    // Make sure single-leaf-node case is OK:
-    doTestRandom(10);
-  }
-
-  public void testRandomMedium() throws Exception {
-    doTestRandom(10000);
+  @Override
+  protected ShapeType getShapeType() {
+    return ShapeType.POLYGON;
   }
 
-  @Nightly
-  public void testRandomBig() throws Exception {
-    doTestRandom(50000);
-  }
-
-  private void doTestRandom(int count) throws Exception {
-    int numPolygons = atLeast(count);
-
-    if (VERBOSE) {
-      System.out.println("TEST: numPolygons=" + numPolygons);
-    }
-
-    Polygon[] polygons = new Polygon[numPolygons];
-    for (int id = 0; id < numPolygons; ++id) {
-      int x = random().nextInt(20);
-      if (x == 17) {
-        polygons[id] = null;
-        if (VERBOSE) {
-          System.out.println("  id=" + id + " is missing");
-        }
-      } else {
-        // create a polygon that does not cross the dateline
-        polygons[id] = GeoTestUtil.nextPolygon();
+  @Override
+  protected Polygon nextShape() {
+    Polygon p;
+    while (true) {
+      // if we can't tessellate, the random polygon generator created a malformed shape
+      p = (Polygon)getShapeType().nextShape();
+      try {
+        Tessellator.tessellate(p);
+        return p;
+      } catch (IllegalArgumentException e) {
+        continue;
       }
     }
-    verify(polygons);
   }
 
-  private void verify(Polygon... polygons) throws Exception {
-    ArrayList<Polygon2D> poly2d = new ArrayList<>();
-    poly2d.ensureCapacity(polygons.length);
-    // index random polygons; poly2d will contain the Polygon2D objects needed for verification
-    IndexWriter w = indexRandomPolygons(poly2d, polygons);
-    Directory dir = w.getDirectory();
-    final IndexReader reader = DirectoryReader.open(w);
-    // test random bbox queries
-    verifyRandomBBoxQueries(reader, poly2d, polygons);
-    // test random polygon queries
-    verifyRandomPolygonQueries(reader, poly2d, polygons);
-    IOUtils.close(w, reader, dir);
+  @Override
+  protected Field[] createIndexableFields(String field, Object polygon) {
+    return LatLonShape.createIndexableFields(field, (Polygon)polygon);
   }
 
-  protected IndexWriter indexRandomPolygons(List<Polygon2D> poly2d, Polygon... polygons) throws Exception {
-    IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setMergeScheduler(new SerialMergeScheduler());
-    int mbd = iwc.getMaxBufferedDocs();
-    if (mbd != -1 && mbd < polygons.length / 100) {
-      iwc.setMaxBufferedDocs(polygons.length / 100);
-    }
-    Directory dir;
-    if (polygons.length > 1000) {
-      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
-    } else {
-      dir = newDirectory();
-    }
-
-    Set<Integer> deleted = new HashSet<>();
-    IndexWriter w = new IndexWriter(dir, iwc);
-    for (int id = 0; id < polygons.length; ++id) {
-      Document doc = new Document();
-      doc.add(newStringField("id", "" + id, Field.Store.NO));
-      doc.add(new NumericDocValuesField("id", id));
-      if (polygons[id] != null) {
-        try {
-          addPolygonsToDoc(FIELD_NAME, doc, polygons[id]);
-        } catch (IllegalArgumentException e) {
-          // GeoTestUtil will occasionally create invalid polygons
-          // invalid polygons will not tessellate
-          // we skip those polygons that will not tessellate, relying on the TestTessellator class
-          // to ensure the Tessellator correctly identified a malformed shape and it's not a bug
-          if (VERBOSE) {
-            System.out.println("  id=" + id + " could not tessellate. Malformed shape " + polygons[id] + " detected");
-          }
-          // remove and skip the malformed shape
-          polygons[id] = null;
-          poly2d.add(id, null);
-          continue;
-        }
-        poly2d.add(id, Polygon2D.create(quantizePolygon(polygons[id])));
-      } else {
-        poly2d.add(id, null);
-      }
-      w.addDocument(doc);
-      if (id > 0 && random().nextInt(100) == 42) {
-        int idToDelete = random().nextInt(id);
-        w.deleteDocuments(new Term("id", ""+idToDelete));
-        deleted.add(idToDelete);
-        if (VERBOSE) {
-          System.out.println("   delete id=" + idToDelete);
-        }
-      }
-    }
-
-    if (random().nextBoolean()) {
-      w.forceMerge(1);
-    }
-
-    return w;
+  @Override
+  protected Validator getValidator() {
+    return VALIDATOR;
   }
 
-  protected void verifyRandomBBoxQueries(IndexReader reader, List<Polygon2D> poly2d, Polygon... polygons) throws Exception {
-    IndexSearcher s = newSearcher(reader);
-
-    final int iters = atLeast(75);
-
-    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
-    int maxDoc = s.getIndexReader().maxDoc();
-
-    for (int iter = 0; iter < iters; ++iter) {
-      if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
-      }
-
-      // BBox
-      Rectangle rect = GeoTestUtil.nextBoxNotCrossingDateline();
-      Query query = newRectQuery(FIELD_NAME, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon);
-
-      if (VERBOSE) {
-        System.out.println("  query=" + query);
-      }
-
-      final FixedBitSet hits = new FixedBitSet(maxDoc);
-      s.search(query, new SimpleCollector() {
-
-        private int docBase;
-
-        @Override
-        public ScoreMode scoreMode() {
-          return ScoreMode.COMPLETE_NO_SCORES;
-        }
-
-        @Override
-        protected void doSetNextReader(LeafReaderContext context) throws IOException {
-          docBase = context.docBase;
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-          hits.set(docBase+doc);
-        }
-      });
-
-      boolean fail = false;
-      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
-      for (int docID = 0; docID < maxDoc; ++docID) {
-        assertEquals(docID, docIDToID.nextDoc());
-        int id = (int) docIDToID.longValue();
-        boolean expected;
-        if (liveDocs != null && liveDocs.get(docID) == false) {
-          // document is deleted
-          expected = false;
-        } else if (polygons[id] == null) {
-          expected = false;
-        } else {
-          // check quantized poly against quantized query
-          expected = poly2d.get(id).relate(quantizeLatCeil(rect.minLat), quantizeLat(rect.maxLat),
-              quantizeLonCeil(rect.minLon), quantizeLon(rect.maxLon)) != Relation.CELL_OUTSIDE_QUERY;
-        }
-
-        if (hits.get(docID) != expected) {
-          StringBuilder b = new StringBuilder();
-
-          if (expected) {
-            b.append("FAIL: id=" + id + " should match but did not\n");
-          } else {
-            b.append("FAIL: id=" + id + " should not match but did\n");
-          }
-          b.append("  query=" + query + " docID=" + docID + "\n");
-          b.append("  polygon=" + quantizePolygon(polygons[id]) + "\n");
-          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
-          b.append("  rect=Rectangle(" + quantizeLatCeil(rect.minLat) + " TO " + quantizeLat(rect.maxLat) + " lon=" + quantizeLonCeil(rect.minLon) + " TO " + quantizeLon(rect.maxLon) + ")");
-          if (true) {
-            fail("wrong hit (first of possibly more):\n\n" + b);
-          } else {
-            System.out.println(b.toString());
-            fail = true;
-          }
-        }
-      }
-      if (fail) {
-        fail("some hits were wrong");
-      }
+  protected class PolygonValidator implements Validator {
+    @Override
+    public boolean testBBoxQuery(double minLat, double maxLat, double minLon, double maxLon, Object shape) {
+      Polygon2D poly = Polygon2D.create(quantizePolygon((Polygon)shape));
+      return poly.relate(minLat, maxLat, minLon, maxLon) != Relation.CELL_OUTSIDE_QUERY;
     }
-  }
 
-  protected void verifyRandomPolygonQueries(IndexReader reader, List<Polygon2D> poly2d, Polygon... polygons) throws Exception {
-    IndexSearcher s = newSearcher(reader);
+    @Override
+    public boolean testPolygonQuery(Polygon2D query, Object shape) {
 
-    final int iters = atLeast(75);
-
-    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
-    int maxDoc = s.getIndexReader().maxDoc();
-
-    for (int iter = 0; iter < iters; ++iter) {
-      if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + (iter+1) + " of " + iters + " s=" + s);
-      }
-
-      // Polygon
-      Polygon queryPolygon = GeoTestUtil.nextPolygon();
-      Polygon2D queryPoly2D = Polygon2D.create(queryPolygon);
-      Query query = newPolygonQuery(FIELD_NAME, queryPolygon);
-
-      if (VERBOSE) {
-        System.out.println("  query=" + query);
-      }
-
-      final FixedBitSet hits = new FixedBitSet(maxDoc);
-      s.search(query, new SimpleCollector() {
-
-        private int docBase;
-
-        @Override
-        public ScoreMode scoreMode() {
-          return ScoreMode.COMPLETE_NO_SCORES;
+      List<Tessellator.Triangle> tessellation = Tessellator.tessellate((Polygon) shape);
+      for (Tessellator.Triangle t : tessellation) {
+        // we quantize the triangle for consistency with the index
+        if (query.relateTriangle(quantizeLon(t.getLon(0)), quantizeLat(t.getLat(0)),
+            quantizeLon(t.getLon(1)), quantizeLat(t.getLat(1)),
+            quantizeLon(t.getLon(2)), quantizeLat(t.getLat(2))) != Relation.CELL_OUTSIDE_QUERY) {
+          return true;
         }
-
-        @Override
-        protected void doSetNextReader(LeafReaderContext context) throws IOException {
-          docBase = context.docBase;
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-          hits.set(docBase+doc);
-        }
-      });
-
-      boolean fail = false;
-      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
-      for (int docID = 0; docID < maxDoc; ++docID) {
-        assertEquals(docID, docIDToID.nextDoc());
-        int id = (int) docIDToID.longValue();
-        boolean expected;
-        if (liveDocs != null && liveDocs.get(docID) == false) {
-          // document is deleted
-          expected = false;
-        } else if (polygons[id] == null) {
-          expected = false;
-        } else {
-          expected = false;
-          try {
-            // check poly (quantized the same way as indexed) against query polygon
-            List<Tessellator.Triangle> tesselation = Tessellator.tessellate(quantizePolygon(polygons[id]));
-            for (Tessellator.Triangle t : tesselation) {
-              if (queryPoly2D.relateTriangle(t.getLon(0), t.getLat(0),
-                  t.getLon(1), t.getLat(1), t.getLon(2), t.getLat(2)) != Relation.CELL_OUTSIDE_QUERY) {
-                expected = true;
-                break;
-              }
-            }
-          } catch (IllegalArgumentException e) {
-            continue;
-          }
-        }
-
-        if (hits.get(docID) != expected) {
-          StringBuilder b = new StringBuilder();
-
-          if (expected) {
-            b.append("FAIL: id=" + id + " should match but did not\n");
-          } else {
-            b.append("FAIL: id=" + id + " should not match but did\n");
-          }
-          b.append("  query=" + query + " docID=" + docID + "\n");
-          b.append("  polygon=" + quantizePolygon(polygons[id]).toGeoJSON() + "\n");
-          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
-          b.append("  queryPolygon=" + queryPolygon.toGeoJSON());
-          if (true) {
-            fail("wrong hit (first of possibly more):\n\n" + b);
-          } else {
-            System.out.println(b.toString());
-            fail = true;
-          }
-        }
-      }
-      if (fail) {
-        fail("some hits were wrong");
       }
+      return false;
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a0e33a9b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java
index f673d0a..3aa5ace 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java
@@ -18,6 +18,7 @@ package org.apache.lucene.document;
 
 import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import org.apache.lucene.geo.GeoTestUtil;
+import org.apache.lucene.geo.Line;
 import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -43,6 +44,13 @@ public class TestLatLonShape extends LuceneTestCase {
     }
   }
 
+  protected void addLineToDoc(String field, Document doc, Line line) {
+    Field[] fields = LatLonShape.createIndexableFields(field, line);
+    for (Field f : fields) {
+      doc.add(f);
+    }
+  }
+
   protected Query newRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) {
     return LatLonShape.newBoxQuery(field, minLat, maxLat, minLon, maxLon);
   }
@@ -81,19 +89,36 @@ public class TestLatLonShape extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    // add a random polygon
+    // add a random polygon document
     Polygon p = GeoTestUtil.createRegularPolygon(0, 90, atLeast(1000000), numVertices);
     Document document = new Document();
     addPolygonsToDoc(FIELDNAME, document, p);
     writer.addDocument(document);
 
+    // add a line document
+    document = new Document();
+    // add a line string
+    double[] lats = new double[p.numPoints() - 1];
+    double[] lons = new double[p.numPoints() - 1];
+    for (int i = 0; i < lats.length; ++i) {
+      lats[i] = p.getPolyLat(i);
+      lons[i] = p.getPolyLon(i);
+    }
+    Line l = new Line(lats, lons);
+    addLineToDoc(FIELDNAME, document, l);
+    writer.addDocument(document);
+
     ////// search /////
     // search an intersecting bbox
     IndexReader reader = writer.getReader();
     writer.close();
     IndexSearcher searcher = newSearcher(reader);
-    Query q = newRectQuery(FIELDNAME, -1d, 1d, p.minLon, p.maxLon);
-    assertEquals(1, searcher.count(q));
+    double minLat = Math.min(lats[0], lats[1]);
+    double minLon = Math.min(lons[0], lons[1]);
+    double maxLat = Math.max(lats[0], lats[1]);
+    double maxLon = Math.max(lons[0], lons[1]);
+    Query q = newRectQuery(FIELDNAME, minLat, maxLat, minLon, maxLon);
+    assertEquals(2, searcher.count(q));
 
     // search a disjoint bbox
     q = newRectQuery(FIELDNAME, p.minLat-1d, p.minLat+1, p.minLon-1d, p.minLon+1d);


[35/48] lucene-solr:jira/http2: SOLR-12028: BadApple and AwaitsFix annotations usage

Posted by da...@apache.org.
SOLR-12028: BadApple and AwaitsFix annotations usage


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6afd3d11
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6afd3d11
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6afd3d11

Branch: refs/heads/jira/http2
Commit: 6afd3d11929a75e3b3310638b32f4ed55da3ea6e
Parents: a0e33a9
Author: Erick <Er...@Ericks-MacBook-Pro-2.local>
Authored: Thu Aug 2 17:35:13 2018 -0700
Committer: Erick Erickson <Er...@gmail.com>
Committed: Thu Aug 2 17:40:59 2018 -0700

----------------------------------------------------------------------
 .../src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java |  2 ++
 .../src/test/org/apache/solr/TestDistributedSearch.java  |  1 +
 .../src/test/org/apache/solr/cloud/AddReplicaTest.java   |  2 +-
 .../apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java  |  2 ++
 .../test/org/apache/solr/cloud/DeleteReplicaTest.java    |  2 +-
 .../org/apache/solr/cloud/DocValuesNotIndexedTest.java   |  2 ++
 .../apache/solr/cloud/FullSolrCloudDistribCmdsTest.java  |  2 ++
 .../apache/solr/cloud/LeaderElectionIntegrationTest.java |  2 ++
 .../org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java |  2 +-
 .../src/test/org/apache/solr/cloud/MoveReplicaTest.java  |  2 ++
 .../test/org/apache/solr/cloud/OverseerRolesTest.java    |  2 +-
 .../org/apache/solr/cloud/PeerSyncReplicationTest.java   |  2 +-
 .../src/test/org/apache/solr/cloud/RecoveryZkTest.java   |  2 +-
 .../test/org/apache/solr/cloud/RollingRestartTest.java   |  3 +--
 .../test/org/apache/solr/cloud/TestCloudConsistency.java |  3 ++-
 .../test/org/apache/solr/cloud/TestCloudPivotFacet.java  |  2 +-
 .../apache/solr/cloud/TestMiniSolrCloudClusterSSL.java   |  4 +++-
 .../CollectionsAPIAsyncDistributedZkTest.java            |  3 ++-
 .../api/collections/TestHdfsCloudBackupRestore.java      |  2 ++
 .../api/collections/TestLocalFSCloudBackupRestore.java   |  2 ++
 .../autoscaling/AutoAddReplicasIntegrationTest.java      |  2 ++
 .../solr/cloud/autoscaling/ComputePlanActionTest.java    |  3 +++
 .../cloud/autoscaling/MetricTriggerIntegrationTest.java  |  2 ++
 .../autoscaling/ScheduledTriggerIntegrationTest.java     |  2 ++
 .../autoscaling/sim/TestGenericDistributedQueue.java     |  1 +
 .../solr/cloud/autoscaling/sim/TestLargeCluster.java     |  2 +-
 .../org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java    |  2 ++
 .../solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java   |  2 ++
 .../src/test/org/apache/solr/handler/TestSQLHandler.java |  2 ++
 .../handler/component/DistributedMLTComponentTest.java   |  2 ++
 .../solr/metrics/reporters/SolrJmxReporterCloudTest.java |  2 ++
 .../metrics/reporters/solr/SolrCloudReportersTest.java   |  3 +++
 .../org/apache/solr/search/stats/TestDistribIDF.java     |  2 ++
 .../security/hadoop/TestDelegationWithHadoopAuth.java    |  2 ++
 .../org/apache/solr/servlet/HttpSolrCallGetCoreTest.java |  2 ++
 .../org/apache/solr/update/MaxSizeAutoCommitTest.java    |  7 +++----
 .../apache/solr/update/TestInPlaceUpdatesDistrib.java    |  2 ++
 .../solr/client/solrj/embedded/LargeVolumeJettyTest.java |  2 ++
 .../solr/client/solrj/impl/CloudSolrClientTest.java      |  5 ++++-
 .../solr/client/solrj/io/graph/GraphExpressionTest.java  |  1 +
 .../org/apache/solr/client/solrj/io/graph/GraphTest.java |  1 +
 .../solr/client/solrj/io/stream/StreamDecoratorTest.java | 11 +++++++++++
 .../client/solrj/io/stream/StreamExpressionTest.java     |  1 +
 43 files changed, 87 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
index d4c30e6..26f36fd 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
@@ -19,6 +19,7 @@ import java.io.File;
 import java.util.SortedMap;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.embedded.JettyConfig;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -73,6 +74,7 @@ public class TestLTROnSolrCloud extends TestRerankBase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testSimpleQuery() throws Exception {
     // will randomly pick a configuration with [1..5] shards and [1..3] replicas
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
index 89c6a17..c9be70f 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
@@ -109,6 +109,7 @@ public class TestDistributedSearch extends BaseDistributedSearchTestCase {
   
   @Test
   //05-Jul-2018  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void test() throws Exception {
     QueryResponse rsp = null;
     int backupStress = stress; // make a copy so we can restore

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java
index 55b1848..e338cc2 100644
--- a/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java
@@ -46,7 +46,7 @@ public class AddReplicaTest extends SolrCloudTestCase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
   public void test() throws Exception {
     cluster.waitForAllNodes(5000);
     String collection = "addreplicatest_coll";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
index b40870c..2b6584e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
@@ -21,6 +21,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressObjectReleaseTracker;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
@@ -113,6 +114,7 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
 
   @Test
   //05-Jul-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void test() throws Exception {
     // None of the operations used here are particularly costly, so this should work.
     // Using this low timeout will also help us catch index stalling.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
index 52d5e45..30124d1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
@@ -173,7 +173,7 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
   public void deleteReplicaFromClusterState() throws Exception {
     deleteReplicaFromClusterState("true");
     deleteReplicaFromClusterState("false");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java b/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java
index c50fbb5..bbd1389 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java
@@ -30,6 +30,7 @@ import java.util.Locale;
 import java.util.Map;
 
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
@@ -313,6 +314,7 @@ public class DocValuesNotIndexedTest extends SolrCloudTestCase {
   // make sure all the values for each field are unique. We need to have docs that have values that are _not_
   // unique.
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testGroupingDVOnly() throws IOException, SolrServerException {
     List<SolrInputDocument> docs = new ArrayList<>(50);
     for (int idx = 0; idx < 49; ++idx) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
index b4c5a46..b31e8f7f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrClient;
@@ -68,6 +69,7 @@ public class FullSolrCloudDistribCmdsTest extends AbstractFullDistribZkTestBase
 
   @Test
   @ShardsFixed(num = 6)
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
index ff9a220..fa08381 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -61,6 +62,7 @@ public class LeaderElectionIntegrationTest extends SolrCloudTestCase {
 
   @Test
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testSimpleSliceLeaderElection() throws Exception {
     String collection = "collection1";
     createCollection(collection);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
index e937cdd..3584487 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
@@ -124,7 +124,7 @@ public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
   public void testMostInSyncReplicasCanWinElection() throws Exception {
     final String collectionName = "collection1";
     CollectionAdminRequest.createCollection(collectionName, 1, 3)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
index d58edf6..5131006 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -237,6 +238,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
   // @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11458")
   @Test
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 17-Mar-2018 This JIRA is fixed, but this test still fails
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testFailedMove() throws Exception {
     String coll = getTestClass().getSimpleName() + "_failed_coll_" + inPlaceMove;
     int REPLICATION = 2;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
index 6b5884c..4ec445e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
@@ -85,7 +85,7 @@ public class OverseerRolesTest extends SolrCloudTestCase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018
   public void testOverseerRole() throws Exception {
 
     logOverseerState();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
index d4776d7..5f20423 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
@@ -109,7 +109,7 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
   public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
index 398002c..fa9a2df 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
@@ -59,7 +59,7 @@ public class RecoveryZkTest extends SolrCloudTestCase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
   public void test() throws Exception {
 
     final String collection = "recoverytest";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
index 511f221..53e7131 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
@@ -22,7 +22,6 @@ import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.zookeeper.KeeperException;
@@ -47,7 +46,7 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase {
   }
 
   @Test
-  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18
+  //commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18
   public void test() throws Exception {
     waitForRecoveriesToFinish(false);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
index d408dcc..aac1b9c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
@@ -89,12 +89,13 @@ public class TestCloudConsistency extends SolrCloudTestCase {
   }
 
   @Test
-  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
+  //commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
   public void testOutOfSyncReplicasCannotBecomeLeader() throws Exception {
     testOutOfSyncReplicasCannotBecomeLeader(false);
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testOutOfSyncReplicasCannotBecomeLeaderAfterRestart() throws Exception {
     testOutOfSyncReplicasCannotBecomeLeader(true);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
index 2b7a62c..adbaf2b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
@@ -107,7 +107,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase {
   }
 
   @Test
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
+  //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
   public void test() throws Exception {
 
     sanityCheckAssertNumerics();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java
index 7a6606a..eeb7be7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java
@@ -33,6 +33,7 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestRuleRestoreSystemProperties;
 
 import org.apache.solr.SolrTestCaseJ4;
@@ -129,7 +130,8 @@ public class TestMiniSolrCloudClusterSSL extends SolrTestCaseJ4 {
     System.setProperty(ZkStateReader.URL_SCHEME, "https");
     checkClusterWithNodeReplacement(sslConfig);
   }
-  
+
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testSslWithCheckPeerName() throws Exception {
     final SSLTestConfig sslConfig = new SSLTestConfig(true, false, true);
     HttpClientUtil.setSchemaRegistryProvider(sslConfig.buildClientSchemaRegistryProvider());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
index 6c6252a..07a8808 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
@@ -24,6 +24,7 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
@@ -190,7 +191,7 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
         .processAndWait(client, MAX_TIMEOUT_SECONDS);
     assertSame("DeleteCollection did not complete", RequestStatusState.COMPLETED, state);
   }
-  
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testAsyncIdRaceCondition() throws Exception {
     SolrClient[] clients = new SolrClient[cluster.getJettySolrRunners().size()];
     int j = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
index 9029cc1..ae391ae 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
@@ -61,6 +62,7 @@ import static org.apache.solr.core.backup.BackupManager.ZK_STATE_DIR;
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 //05-Jul-2018  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
 public class TestHdfsCloudBackupRestore extends AbstractCloudBackupRestoreTestCase {
   public static final String SOLR_XML = "<solr>\n" +
       "\n" +

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java
index 587b9b1..cb6c0dd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.cloud.api.collections;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.junit.BeforeClass;
 
 /**
@@ -23,6 +24,7 @@ import org.junit.BeforeClass;
  * Note that the Solr backup/restore still requires a "shared" file-system. Its just that in this case
  * such file-system would be exposed via local file-system API.
  */
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
 public class TestLocalFSCloudBackupRestore extends AbstractCloudBackupRestoreTestCase {
   private static String backupLocation;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
index c2e95ed..e80252f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -60,6 +61,7 @@ public class AutoAddReplicasIntegrationTest extends SolrCloudTestCase {
   @Test
   // This apparently fails in both subclasses.
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testSimple() throws Exception {
     JettySolrRunner jetty1 = cluster.getJettySolrRunner(0);
     JettySolrRunner jetty2 = cluster.getJettySolrRunner(1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
index 16cc34e..8791388 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
@@ -28,6 +28,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.NodeStateProvider;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
@@ -158,6 +159,7 @@ public class ComputePlanActionTest extends SolrCloudTestCase {
 
   @Test
   //28-June-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testNodeLost() throws Exception  {
     // let's start a node so that we have at least two
     JettySolrRunner runner = cluster.startJettySolrRunner();
@@ -239,6 +241,7 @@ public class ComputePlanActionTest extends SolrCloudTestCase {
 
   }
 
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testNodeWithMultipleReplicasLost() throws Exception {
     AssertingTriggerAction.expectedNode = null;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java
index 67d943f..fb4a605 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java
@@ -27,6 +27,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
@@ -81,6 +82,7 @@ public class MetricTriggerIntegrationTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testMetricTrigger() throws Exception {
     cluster.waitForAllNodes(5);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java
index 1ca6c15..6197dc9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java
@@ -27,6 +27,7 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -72,6 +73,7 @@ public class ScheduledTriggerIntegrationTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testScheduledTrigger() throws Exception {
     CloudSolrClient solrClient = cluster.getSolrClient();
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestGenericDistributedQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestGenericDistributedQueue.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestGenericDistributedQueue.java
index 15fb4d5..4f25d10 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestGenericDistributedQueue.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestGenericDistributedQueue.java
@@ -28,6 +28,7 @@ public class TestGenericDistributedQueue extends TestSimDistributedQueue {
   DistribStateManager stateManager = new SimDistribStateManager();
 
   @Override
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   protected DistributedQueue makeDistributedQueue(String dqZNode) throws Exception {
     return new GenericDistributedQueue(stateManager, dqZNode);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
index 2b7acd2..61fedb8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestLargeCluster.java
@@ -587,7 +587,7 @@ public class TestLargeCluster extends SimSolrCloudTestCase {
   }
 
   @Test
-  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18
+  //commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18
   public void testSearchRate() throws Exception {
     SolrClient solrClient = cluster.simGetSolrClient();
     String collectionName = "testSearchRate";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
index fc8e5e8..652f602 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
@@ -22,6 +22,7 @@ import java.lang.invoke.MethodHandles;
 import java.util.LinkedHashMap;
 
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -57,6 +58,7 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
    * call returns the same version as the last update indexed on the source.
    */
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testConvertClusterToCdcrAndBootstrap() throws Exception {
     // start the target first so that we know its zkhost
     MiniSolrCloudCluster target = new MiniSolrCloudCluster(1, createTempDir("cdcr-target"), buildJettyConfig("/solr"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
index 0bd29ac..0c03885 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
@@ -19,6 +19,7 @@ package org.apache.solr.cloud.hdfs;
 import java.io.IOException;
 
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.ChaosMonkeySafeLeaderTest;
 import org.apache.solr.util.BadHdfsThreadsFilter;
@@ -33,6 +34,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 @ThreadLeakFilters(defaultFilters = true, filters = {
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
 public class HdfsChaosMonkeySafeLeaderTest extends ChaosMonkeySafeLeaderTest {
   private static MiniDFSCluster dfsCluster;
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
index 47b2328..8315868 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
@@ -23,6 +23,7 @@ import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -77,6 +78,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
 
   @Test
   //28-June-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void doTest() throws Exception {
     waitForRecoveriesToFinish(false);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
index 157d6a7..4b07775 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
@@ -19,6 +19,7 @@ package org.apache.solr.handler.component;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.BaseDistributedSearchTestCase;
@@ -75,6 +76,7 @@ public class DistributedMLTComponentTest extends BaseDistributedSearchTestCase {
   
   @Test
   @ShardsFixed(num = 3)
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void test() throws Exception {
     del("*:*");
     index(id, "1", "lowerfilt", "toyota", "lowerfilt1", "x");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
index 0ae0289..ffe9834 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
@@ -26,6 +26,7 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -62,6 +63,7 @@ public class SolrJmxReporterCloudTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testJmxReporter() throws Exception {
     CollectionAdminRequest.reloadCollection(COLLECTION).processAndWait(cluster.getSolrClient(), 60);
     CloudSolrClient solrClient = cluster.getSolrClient();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
index 59952c3..8b8b828 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
@@ -21,6 +21,7 @@ import java.util.Map;
 
 import com.codahale.metrics.Metric;
 import org.apache.commons.io.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.core.CoreContainer;
@@ -58,6 +59,7 @@ public class SolrCloudReportersTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testExplicitConfiguration() throws Exception {
     String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr-solrreporter.xml"), "UTF-8");
     configureCluster(2)
@@ -153,6 +155,7 @@ public class SolrCloudReportersTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testDefaultPlugins() throws Exception {
     String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr.xml"), "UTF-8");
     configureCluster(2)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java b/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java
index cac2dc5..e748744 100644
--- a/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java
+++ b/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java
@@ -19,6 +19,7 @@ package org.apache.solr.search.stats;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrClient;
@@ -141,6 +142,7 @@ public class TestDistribIDF extends SolrTestCaseJ4 {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testMultiCollectionQuery() throws Exception {
     // collection1 and collection2 are collections which have distributed idf enabled
     // collection1_local and collection2_local don't have distributed idf available

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java b/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java
index c440ccb..beb6f19 100644
--- a/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java
+++ b/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 import org.apache.hadoop.util.Time;
 import org.apache.http.HttpStatus;
 import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -314,6 +315,7 @@ public class TestDelegationWithHadoopAuth extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testDelegationTokenRenew() throws Exception {
     // test with specifying renewer
     verifyDelegationTokenRenew(USER_1, USER_1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallGetCoreTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallGetCoreTest.java b/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallGetCoreTest.java
index ae683e5..eb67221 100644
--- a/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallGetCoreTest.java
+++ b/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallGetCoreTest.java
@@ -25,6 +25,7 @@ import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.AbstractDistribZkTestBase;
@@ -34,6 +35,7 @@ import org.eclipse.jetty.server.Response;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
 public class HttpSolrCallGetCoreTest extends SolrCloudTestCase {
   private static final String COLLECTION = "collection1";
   private static final int NUM_SHARD = 3;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java b/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java
index 14419e3..57390d9 100644
--- a/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java
+++ b/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java
@@ -29,7 +29,6 @@ import java.util.function.Function;
 
 import com.carrotsearch.randomizedtesting.annotations.Repeat;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.params.MapSolrParams;
@@ -44,7 +43,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18
+//commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18
 public class MaxSizeAutoCommitTest extends SolrTestCaseJ4 {
 
   // Given an ID, returns an XML string for an "add document" request
@@ -169,7 +168,7 @@ public class MaxSizeAutoCommitTest extends SolrTestCaseJ4 {
   }
 
   @Test
-  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
+  //commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
   public void deleteTest() throws Exception {
     int maxFileSizeBound = 1000;
     int maxFileSizeBoundWithBuffer = (int) (maxFileSizeBound * 1.25);
@@ -206,7 +205,7 @@ public class MaxSizeAutoCommitTest extends SolrTestCaseJ4 {
   
   @Test
   @Repeat(iterations = 5)
-  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
+  //commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
   public void endToEndTest() throws Exception {
     int maxFileSizeBound = 5000;
     // Set max size bound

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index b9be989..2441671 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -32,6 +32,7 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
@@ -120,6 +121,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
   @ShardsFixed(num = 3)
   @SuppressWarnings("unchecked")
   //28-June-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void test() throws Exception {
     waitForRecoveriesToFinish(true);
     mapReplicasToClients();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java
index d627f2b..89f937a 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java
@@ -16,9 +16,11 @@
  */
 package org.apache.solr.client.solrj.embedded;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.LargeVolumeTestBase;
 import org.junit.BeforeClass;
 
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
 public class LargeVolumeJettyTest extends LargeVolumeTestBase {
   @BeforeClass
   public static void beforeTest() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
index bc4bd8c..283d5a0 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
@@ -36,6 +36,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
@@ -394,6 +395,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
    * limits the distributed query to locally hosted shards only
    */
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void preferLocalShardsTest() throws Exception {
 
     String collectionName = "localShardsTestColl";
@@ -849,10 +851,11 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
   }
 
   /**
-   * Tests if the specification of 'preferReplicaTypes' in the query-params
+   * Tests if the specification of 'preferReplicaTypes' in the query-params
    * limits the distributed query to locally hosted shards only
    */
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void preferReplicaTypesTest() throws Exception {
 
     String collectionName = "replicaTypesTestColl";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
index 2cd0e97..f14003c 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
@@ -97,6 +97,7 @@ public class GraphExpressionTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testShortestPathStream() throws Exception {
 
     new UpdateRequest()

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
index dea758b..fc24a63 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
@@ -72,6 +72,7 @@ public class GraphTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testShortestPathStream() throws Exception {
 
     new UpdateRequest()

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
index 2726739..be919eb 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
@@ -106,6 +106,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testUniqueStream() throws Exception {
 
     new UpdateRequest()
@@ -656,6 +657,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelHavingStream() throws Exception {
 
     SolrClientCache solrClientCache = new SolrClientCache();
@@ -866,6 +868,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelFetchStream() throws Exception {
 
     new UpdateRequest()
@@ -1377,6 +1380,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelReducerStream() throws Exception {
 
     new UpdateRequest()
@@ -1509,6 +1513,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelMergeStream() throws Exception {
 
     new UpdateRequest()
@@ -2315,6 +2320,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelPriorityStream() throws Exception {
     Assume.assumeTrue(!useAlias);
 
@@ -2484,6 +2490,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelUpdateStream() throws Exception {
 
     CollectionAdminRequest.createCollection("parallelDestinationCollection", "conf", 2, 1).process(cluster.getSolrClient());
@@ -2583,6 +2590,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelDaemonUpdateStream() throws Exception {
 
     CollectionAdminRequest.createCollection("parallelDestinationCollection1", "conf", 2, 1).process(cluster.getSolrClient());
@@ -3288,6 +3296,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testClassifyStream() throws Exception {
     Assume.assumeTrue(!useAlias);
 
@@ -3482,6 +3491,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testExecutorStream() throws Exception {
     CollectionAdminRequest.createCollection("workQueue", "conf", 2, 1).processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT);
     AbstractDistribZkTestBase.waitForRecoveriesToFinish("workQueue", cluster.getSolrClient().getZkStateReader(),
@@ -3551,6 +3561,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
 
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelExecutorStream() throws Exception {
     CollectionAdminRequest.createCollection("workQueue1", "conf", 2, 1).processAndWait(cluster.getSolrClient(),DEFAULT_TIMEOUT);
     AbstractDistribZkTestBase.waitForRecoveriesToFinish("workQueue1", cluster.getSolrClient().getZkStateReader(),

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6afd3d11/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 52c77dd..e5455b5 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -1551,6 +1551,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
 
 
   @Test
+  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testParallelTopicStream() throws Exception {
 
     Assume.assumeTrue(!useAlias);


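The recurring edit above follows one pattern: a JUnit test method (or class) gains or loses Lucene's @BadApple marker. A minimal sketch of that pattern (class and test names are hypothetical, and it assumes the test framework's tests.badapples system property gates whether annotated tests run):

    import org.apache.lucene.util.LuceneTestCase;
    import org.junit.Test;

    public class ExampleFlakyTest extends LuceneTestCase {
      @Test
      @LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12028")
      public void testSometimesFails() throws Exception {
        // Body elided; the annotation ties a known-flaky test to its tracking
        // JIRA so automated runs can exclude it until the issue is resolved.
      }
    }

Annotations prefixed with "//commented 2-Aug-2018" re-enable tests whose failures appear resolved; freshly added annotations disable tests that have started failing again.
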
[10/48] lucene-solr:jira/http2: Fix 'redundant cast to long' warning in TestTieredMergePolicy.

Posted by da...@apache.org.
Fix 'redundant cast to long' warning in TestTieredMergePolicy.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1a870876
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1a870876
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1a870876

Branch: refs/heads/jira/http2
Commit: 1a870876c24a78c2da8f1222c7d69b90791ccacb
Parents: 8a44828
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 31 18:55:34 2018 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Jul 31 19:29:50 2018 +0100

----------------------------------------------------------------------
 .../src/test/org/apache/lucene/index/TestTieredMergePolicy.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1a870876/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java
index a7f2111..b4a7ff5 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java
@@ -54,7 +54,7 @@ public class TestTieredMergePolicy extends BaseMergePolicyTestCase {
       totalMaxDoc += sci.info.maxDoc();
       long byteSize = sci.sizeInBytes();
       double liveRatio = 1 - (double) sci.getDelCount() / sci.info.maxDoc();
-      long weightedByteSize = (long) Math.round(liveRatio * byteSize);
+      long weightedByteSize = Math.round(liveRatio * byteSize);
       totalBytes += weightedByteSize;
       minSegmentBytes = Math.min(minSegmentBytes, weightedByteSize);
     }
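
The warning here comes from a standard-library signature: java.lang.Math.round(double) already returns long, so the explicit (long) cast performed no conversion. A self-contained illustration with made-up values:

    public class RoundCastDemo {
      public static void main(String[] args) {
        double liveRatio = 0.75;
        long byteSize = 1000L;
        // Math.round(double) returns long, so no cast is needed:
        long weightedByteSize = Math.round(liveRatio * byteSize);
        System.out.println(weightedByteSize); // prints 750
      }
    }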


[39/48] lucene-solr:jira/http2: SOLR-12617: Remove Commons BeanUtils as a dependency

Posted by da...@apache.org.
SOLR-12617: Remove Commons BeanUtils as a dependency


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e3cdb395
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e3cdb395
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e3cdb395

Branch: refs/heads/jira/http2
Commit: e3cdb395a4009f118900397c8a2086620b436455
Parents: 2a41cbd
Author: Varun Thacker <va...@apache.org>
Authored: Fri Aug 3 09:46:36 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Fri Aug 3 09:46:54 2018 -0700

----------------------------------------------------------------------
 lucene/ivy-versions.properties                 | 1 -
 solr/CHANGES.txt                               | 2 ++
 solr/contrib/velocity/ivy.xml                  | 1 -
 solr/licenses/commons-beanutils-1.8.3.jar.sha1 | 1 -
 4 files changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3cdb395/lucene/ivy-versions.properties
----------------------------------------------------------------------
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 9d5303d..3a1ef2b 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -46,7 +46,6 @@ com.sun.jersey.version = 1.9
 
 /com.tdunning/t-digest = 3.1
 /com.vaadin.external.google/android-json = 0.0.20131108.vaadin1
-/commons-beanutils/commons-beanutils = 1.8.3
 /commons-cli/commons-cli = 1.2
 /commons-codec/commons-codec = 1.10
 /commons-collections/commons-collections = 3.2.2

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3cdb395/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index fdccfb8..b9846bd 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -250,6 +250,8 @@ Other Changes
 
 * SOLR-12164: Improve Ref Guide main landing page. (Cassandra Targett)
 
+* SOLR-12617: Remove Commons BeanUtils as a dependency (Varun Thacker)
+
 ==================  7.4.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3cdb395/solr/contrib/velocity/ivy.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/velocity/ivy.xml b/solr/contrib/velocity/ivy.xml
index 426a636..561492d 100644
--- a/solr/contrib/velocity/ivy.xml
+++ b/solr/contrib/velocity/ivy.xml
@@ -23,7 +23,6 @@
     <conf name="test" transitive="false"/>
   </configurations>
   <dependencies>
-    <dependency org="commons-beanutils" name="commons-beanutils" rev="${/commons-beanutils/commons-beanutils}" conf="compile"/>
     <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="compile"/>
     <dependency org="org.apache.velocity" name="velocity" rev="${/org.apache.velocity/velocity}" conf="compile"/>
     <dependency org="org.apache.velocity" name="velocity-tools" rev="${/org.apache.velocity/velocity-tools}" conf="compile"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3cdb395/solr/licenses/commons-beanutils-1.8.3.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/commons-beanutils-1.8.3.jar.sha1 b/solr/licenses/commons-beanutils-1.8.3.jar.sha1
deleted file mode 100644
index 98b35bb..0000000
--- a/solr/licenses/commons-beanutils-1.8.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-686ef3410bcf4ab8ce7fd0b899e832aaba5facf7


[21/48] lucene-solr:jira/http2: Revert "Fix AAIOOBE in GeoTestUtil."

Posted by da...@apache.org.
Revert "Fix AAIOOBE in GeoTestUtil."

This reverts commit c3e813188eaf103ac8b6460cda3ce231db08b623.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/86a39fa2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/86a39fa2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/86a39fa2

Branch: refs/heads/jira/http2
Commit: 86a39fa29f439f149f7fb20f110c87628df8ec2e
Parents: c3e8131
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 15:44:47 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 15:44:47 2018 +0200

----------------------------------------------------------------------
 .../src/java/org/apache/lucene/geo/GeoTestUtil.java            | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/86a39fa2/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
index bb29b0b..8817d20 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/GeoTestUtil.java
@@ -245,7 +245,7 @@ public class GeoTestUtil {
       return new double[] { nextLatitudeBetween(polygon.minLat, polygon.maxLat), nextLongitudeBetween(polygon.minLon, polygon.maxLon) };
     } else if (surpriseMe < 20) {
       // target a vertex
-      int vertex = randomInt(polyLats.length - 2);
+      int vertex = randomInt(polyLats.length - 1);
       return new double[] { nextLatitudeNear(polyLats[vertex], polyLats[vertex+1] - polyLats[vertex]), 
                             nextLongitudeNear(polyLons[vertex], polyLons[vertex+1] - polyLons[vertex]) };
     } else if (surpriseMe < 30) {
@@ -253,12 +253,12 @@ public class GeoTestUtil {
       Polygon container = boxPolygon(new Rectangle(polygon.minLat, polygon.maxLat, polygon.minLon, polygon.maxLon));
       double containerLats[] = container.getPolyLats();
       double containerLons[] = container.getPolyLons();
-      int startVertex = randomInt(containerLats.length - 2);
+      int startVertex = randomInt(containerLats.length - 1);
       return nextPointAroundLine(containerLats[startVertex], containerLons[startVertex], 
                                  containerLats[startVertex+1], containerLons[startVertex+1]);
     } else {
       // target points around diagonals between vertices
-      int startVertex = randomInt(polyLats.length - 2);
+      int startVertex = randomInt(polyLats.length - 1);
       // but favor edges heavily
       int endVertex = randomBoolean() ? startVertex + 1 : randomInt(polyLats.length - 1);
       return nextPointAroundLine(polyLats[startVertex], polyLons[startVertex], 

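For context: each call site above draws a start vertex and then reads index `vertex + 1`. Assuming `randomInt(max)` is inclusive of `max` (an assumption based on the randomized-testing helpers this class builds on, not shown in this diff), the restored bound of `polyLats.length - 1` lets that follow-up read step one past the end of the array, which is the ArrayIndexOutOfBoundsException (AAIOOBE) the reverted commit had addressed. A minimal sketch of the failure mode:

    // Hypothetical illustration only, not the actual GeoTestUtil code.
    // Assume randomInt(max) returns a uniform int in [0, max], inclusive.
    double[] polyLats = {10.0, 20.0, 30.0};   // length == 3
    int vertex = 2;                           // possible result of randomInt(polyLats.length - 1)
    double next = polyLats[vertex + 1];       // polyLats[3] -> ArrayIndexOutOfBoundsException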

[42/48] lucene-solr:jira/http2: SOLR-12592: added support for range, percentage and decimal

Posted by da...@apache.org.
SOLR-12592: added support for range, percentage and decimal


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/664187f7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/664187f7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/664187f7

Branch: refs/heads/jira/http2
Commit: 664187f73af51059c75d51206bf53900433f669c
Parents: f8db5d0
Author: noble <no...@apache.org>
Authored: Sat Aug 4 16:34:23 2018 +1000
Committer: noble <no...@apache.org>
Committed: Sat Aug 4 16:34:23 2018 +1000

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +-
 ...olrcloud-autoscaling-policy-preferences.adoc |   7 +-
 .../client/solrj/cloud/autoscaling/Clause.java  | 178 +------------------
 .../solrj/cloud/autoscaling/ComputedType.java   |  99 +++++++++++
 .../solrj/cloud/autoscaling/Condition.java      | 122 +++++++++++++
 .../solrj/cloud/autoscaling/CoresVariable.java  |  50 ++++--
 .../cloud/autoscaling/FreeDiskVariable.java     |   6 +-
 .../client/solrj/cloud/autoscaling/Operand.java |   6 +-
 .../client/solrj/cloud/autoscaling/Policy.java  |   4 +-
 .../cloud/autoscaling/ReplicaVariable.java      |  24 +--
 .../solrj/cloud/autoscaling/Variable.java       |  31 ++--
 .../solrj/cloud/autoscaling/VariableBase.java   |   8 +-
 .../solrj/cloud/autoscaling/Violation.java      |   4 +-
 .../solrj/cloud/autoscaling/TestPolicy.java     |  63 ++++---
 14 files changed, 347 insertions(+), 257 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 314d045..3d9c68c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -138,7 +138,7 @@ New Features
 
 * SOLR-12402: Factor out SolrDefaultStreamFactory class. (Christine Poerschke)
 
-* SOLR-12592: support #EQUAL function in cores in autoscaling policies (noble)
+* SOLR-12592: support #EQUAL function, range operator, decimal and percentage in cores in autoscaling policies (noble)
 
 Bug Fixes
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc
index c3f8612..9641424 100644
--- a/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc
+++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc
@@ -90,7 +90,12 @@ A policy is a hard rule to be satisfied by each node. If a node does not satisfy
 A policy can have the following attributes:
 
 `cores`::
-This is a special attribute that applies to the entire cluster. It can only be used along with the `node` attribute and no other. This attribute is optional.
+This is a special attribute that applies to the entire cluster. It can only be used along with the `node` attribute and no other. The value of this attribute can be:
+* a positive integer, e.g. "`3`"
+* a number with a decimal value, e.g. "`1.66`". This means both 1 and 2 are acceptable values, but the system would prefer `2`
+* a number range, such as `"3-5"`. This means `3`, `4` and `5` are acceptable values
+* a percentage value, e.g. `33%`. This is computed to a decimal value at runtime
+* `#EQUAL`: divide the number of cores equally among all the nodes or a subset of nodes
 
 `collection`::
 The name of the collection to which the policy rule should apply. If omitted, the rule applies to all collections. This attribute is optional.
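
The new `cores` value forms can be exercised through `Clause.create`, which parses a rule given as a JSON string; the lines below mirror the TestPolicy assertions added later in this commit:

    Clause range   = Clause.create("{cores: '3-5', node:'#ANY'}");    // 3, 4 or 5 cores are acceptable
    Clause decimal = Clause.create("{cores: 1.66, node:'#ANY'}");     // 1 or 2 acceptable, 2 preferred
    Clause percent = Clause.create("{cores: '14%', node:'#ANY'}");    // resolved to a concrete number at runtime
    Clause equal   = Clause.create("{cores: '#EQUAL', node:'#ANY'}"); // divide cores evenly across nodes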

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
index 5fe6894..cd9212b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
@@ -251,6 +251,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
   }
 
   Condition parse(String s, Map m) {
+
     Object expectedVal = null;
     ComputedType computedType = null;
     Object val = m.get(s);
@@ -461,183 +462,6 @@ public class Clause implements MapWriter, Comparable<Clause> {
     return false;
   }
 
-  public enum ComputedType {
-    NULL(),
-    EQUAL() {
-      @Override
-      public String wrap(String value) {
-        return "#EQUAL";
-      }
-
-      @Override
-      public String match(String val) {
-        if ("#EQUAL".equals(val)) return "1";
-        return null;
-      }
-
-    },
-    ALL() {
-      @Override
-      public String wrap(String value) {
-        return "#ALL";
-      }
-
-      @Override
-      public String match(String val) {
-        if ("#ALL".equals(val)) return "1";
-        return null;
-      }
-
-    },
-    PERCENT {
-      @Override
-      public String wrap(String value) {
-        return value + "%";
-      }
-
-      @Override
-      public String match(String val) {
-        if (val != null && !val.isEmpty() && val.charAt(val.length() - 1) == '%') {
-          String newVal = val.substring(0, val.length() - 1);
-          double d;
-          try {
-            d = Double.parseDouble(newVal);
-          } catch (NumberFormatException e) {
-            throw new IllegalArgumentException("Invalid percentage value : " + val);
-          }
-          if (d < 0 || d > 100) {
-            throw new IllegalArgumentException("Percentage value must lie between [1 -100] : provided value : " + val);
-          }
-          return newVal;
-        } else {
-          return null;
-        }
-      }
-
-      @Override
-      public Object compute(Object val, Condition c) {
-        if (val == null || Clause.parseDouble(c.name, val) == 0) return 0d;
-        return Clause.parseDouble(c.name, val) * Clause.parseDouble(c.name, c.val).doubleValue() / 100;
-      }
-
-      @Override
-      public String toString() {
-        return "%";
-      }
-    };
-
-    // return null if there is no match. return a modified string
-    // if there is a match
-    public String match(String val) {
-      return null;
-    }
-
-    public String wrap(String value) {
-      return value;
-    }
-
-    public Object compute(Object val, Condition c) {
-      return val;
-    }
-
-  }
-
-  public static class Condition implements MapWriter {
-    final String name;
-    final Object val;
-    final Type varType;
-    final ComputedType computedType;
-    final Operand op;
-    private Clause clause;
-
-    Condition(String name, Object val, Operand op, ComputedType computedType, Clause parent) {
-      this.name = name;
-      this.val = val;
-      this.op = op;
-      varType = VariableBase.getTagType(name);
-      this.computedType = computedType;
-      this.clause = parent;
-    }
-
-    @Override
-    public void writeMap(EntryWriter ew) throws IOException {
-      String value = op.wrap(val);
-      if (computedType != null) value = computedType.wrap(value);
-      ew.put(name, value);
-    }
-
-    @Override
-    public String toString() {
-      return jsonStr();
-    }
-
-    public Clause getClause() {
-      return clause;
-    }
-
-    boolean isPass(Object inputVal) {
-      return isPass(inputVal, null);
-    }
-
-    boolean isPass(Object inputVal, Row row) {
-      if (computedType != null) {
-        throw new IllegalStateException("This is supposed to be called only from a Condition with no computed value or a SealedCondition");
-
-      }
-      if (inputVal instanceof ReplicaCount) inputVal = ((ReplicaCount) inputVal).getVal(getClause().type);
-      return varType.match(inputVal, op, val, name, row);
-    }
-
-
-    boolean isPass(Row row) {
-      return isPass(row.getVal(name), row);
-    }
-
-    @Override
-    public boolean equals(Object that) {
-      if (that instanceof Condition) {
-        Condition c = (Condition) that;
-        return Objects.equals(c.name, name) && Objects.equals(c.val, val) && c.op == op;
-      }
-      return false;
-    }
-
-    public Double delta(Object val) {
-      if (val instanceof ReplicaCount) val = ((ReplicaCount) val).getVal(getClause().type);
-      if (this.val instanceof String) {
-        if (op == LESS_THAN || op == GREATER_THAN) {
-          return op
-              .opposite(getClause().isReplicaZero() && this == getClause().tag)
-              .delta(Clause.parseDouble(name, this.val), Clause.parseDouble(name, val));
-        } else {
-          return 0d;
-        }
-      } else {
-        if (this == getClause().getReplica()) {
-          Double delta = op.delta(this.val, val);
-          return getClause().isReplicaZero() ? -1 * delta : delta;
-        } else {
-          return op
-              .opposite(getClause().isReplicaZero() && this == getClause().getTag())
-              .delta(this.val, val);
-        }
-
-      }
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public Object getValue() {
-      return val;
-    }
-
-    public Operand getOperand() {
-      return op;
-    }
-  }
-
   public boolean isStrict() {
     return strict;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ComputedType.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ComputedType.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ComputedType.java
new file mode 100644
index 0000000..73bca1f
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ComputedType.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+public enum ComputedType {
+  NULL(),
+  EQUAL() {
+    @Override
+    public String wrap(String value) {
+      return "#EQUAL";
+    }
+
+    @Override
+    public String match(String val) {
+      if ("#EQUAL".equals(val)) return "1";
+      return null;
+    }
+
+  },
+  ALL() {
+    @Override
+    public String wrap(String value) {
+      return "#ALL";
+    }
+
+    @Override
+    public String match(String val) {
+      if ("#ALL".equals(val)) return "1";
+      return null;
+    }
+
+  },
+  PERCENT {
+    @Override
+    public String wrap(String value) {
+      return value + "%";
+    }
+
+    @Override
+    public String match(String val) {
+      if (val != null && !val.isEmpty() && val.charAt(val.length() - 1) == '%') {
+        String newVal = val.substring(0, val.length() - 1);
+        double d;
+        try {
+          d = Double.parseDouble(newVal);
+        } catch (NumberFormatException e) {
+          throw new IllegalArgumentException("Invalid percentage value : " + val);
+        }
+        if (d < 0 || d > 100) {
+          throw new IllegalArgumentException("Percentage value must lie between [0, 100] : provided value : " + val);
+        }
+        return newVal;
+      } else {
+        return null;
+      }
+    }
+
+    @Override
+    public Object compute(Object val, Condition c) {
+      if (val == null || Clause.parseDouble(c.name, val) == 0) return 0d;
+      return Clause.parseDouble(c.name, val) * Clause.parseDouble(c.name, c.val).doubleValue() / 100;
+    }
+
+    @Override
+    public String toString() {
+      return "%";
+    }
+  };
+
+  // return null if there is no match. return a modified string
+  // if there is a match
+  public String match(String val) {
+    return null;
+  }
+
+  public String wrap(String value) {
+    return value;
+  }
+
+  public Object compute(Object val, Condition c) {
+    return val;
+  }
+
+}
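
A short sketch of the contract this enum defines, following the implementations above:

    // match() strips the marker and returns the remainder, or null when it is absent.
    String pct  = ComputedType.PERCENT.match("33%");  // "33"
    String none = ComputedType.PERCENT.match("33");   // null
    // wrap() is the inverse, used when a condition is written back out.
    String wrapped = ComputedType.PERCENT.wrap("33"); // "33%"
    // compute() scales a base value by the condition's percentage; with a
    // condition whose value is 33, PERCENT.compute(300, condition) yields 99.0.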

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Condition.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Condition.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Condition.java
new file mode 100644
index 0000000..3a58804
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Condition.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.cloud.autoscaling;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import org.apache.solr.common.MapWriter;
+
+import static org.apache.solr.client.solrj.cloud.autoscaling.Operand.GREATER_THAN;
+import static org.apache.solr.client.solrj.cloud.autoscaling.Operand.LESS_THAN;
+
+public class Condition implements MapWriter {
+  final String name;
+  final Object val;
+  final Variable.Type varType;
+  final ComputedType computedType;
+  final Operand op;
+  Clause clause;
+
+  Condition(String name, Object val, Operand op, ComputedType computedType, Clause parent) {
+    this.name = name;
+    this.val = val;
+    this.op = op;
+    varType = VariableBase.getTagType(name);
+    this.computedType = computedType;
+    this.clause = parent;
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    String value = op.wrap(val);
+    if (computedType != null) value = computedType.wrap(value);
+    ew.put(name, value);
+  }
+
+  @Override
+  public String toString() {
+    return jsonStr();
+  }
+
+  public Clause getClause() {
+    return clause;
+  }
+
+  boolean isPass(Object inputVal) {
+    return isPass(inputVal, null);
+  }
+
+  boolean isPass(Object inputVal, Row row) {
+    if (computedType != null) {
+      throw new IllegalStateException("This is supposed to be called only from a Condition with no computed value or a SealedCondition");
+
+    }
+    if (inputVal instanceof ReplicaCount) inputVal = ((ReplicaCount) inputVal).getVal(getClause().type);
+    return varType.match(inputVal, op, val, name, row);
+  }
+
+
+  boolean isPass(Row row) {
+    return isPass(row.getVal(name), row);
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that instanceof Condition) {
+      Condition c = (Condition) that;
+      return Objects.equals(c.name, name) && Objects.equals(c.val, val) && c.op == op;
+    }
+    return false;
+  }
+
+  public Double delta(Object val) {
+    if (val instanceof ReplicaCount) val = ((ReplicaCount) val).getVal(getClause().type);
+    if (this.val instanceof String) {
+      if (op == LESS_THAN || op == GREATER_THAN) {
+        return op
+            .opposite(getClause().isReplicaZero() && this == getClause().tag)
+            .delta(Clause.parseDouble(name, this.val), Clause.parseDouble(name, val));
+      } else {
+        return 0d;
+      }
+    } else {
+      if (this == getClause().getReplica()) {
+        Double delta = op.delta(this.val, val);
+        return getClause().isReplicaZero() ? -1 * delta : delta;
+      } else {
+        return op
+            .opposite(getClause().isReplicaZero() && this == getClause().getTag())
+            .delta(this.val, val);
+      }
+
+    }
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public Object getValue() {
+    return val;
+  }
+
+  public Operand getOperand() {
+    return op;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
index 45f9eb7..4df394b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/CoresVariable.java
@@ -17,6 +17,8 @@
 
 package org.apache.solr.client.solrj.cloud.autoscaling;
 
+import java.util.Collection;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Consumer;
 
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
@@ -67,33 +69,47 @@ public class CoresVariable extends VariableBase {
   }
 
   @Override
-  public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
-    if (condition.computedType == Clause.ComputedType.EQUAL) {
-      int[] coresCount = new int[1];
-      int[] liveNodes = new int[1];
-      for (Row row : session.matrix) {
-        if (!row.isLive) continue;
-        liveNodes[0]++;
-        row.forEachReplica(replicaInfo -> coresCount[0]++);
-      }
-      return liveNodes[0] == 0 || coresCount[0] == 0 ? 0d : (double) coresCount[0] / (double) liveNodes[0];
+  public Object computeValue(Policy.Session session, Condition condition, String collection, String shard, String node) {
+    if (condition.computedType == ComputedType.EQUAL) {
+      AtomicInteger liveNodes = new AtomicInteger(0);
+      int coresCount = getTotalCores(session, liveNodes);
+      int numBuckets = condition.clause.tag.op == Operand.IN ?
+          ((Collection) condition.clause.tag.val).size() :
+          liveNodes.get();
+      return numBuckets == 0 || coresCount == 0 ? 0d : (double) coresCount / (double) numBuckets;
+    } else if (condition.computedType == ComputedType.PERCENT) {
+      return ComputedType.PERCENT.compute(getTotalCores(session, new AtomicInteger()), condition);
     } else {
       throw new IllegalArgumentException("Invalid computed type in " + condition);
     }
   }
 
+  private int getTotalCores(Policy.Session session, AtomicInteger liveNodes) {
+    int[] coresCount = new int[1];
+    for (Row row : session.matrix) {
+      if (!row.isLive) continue;
+      liveNodes.incrementAndGet();
+      row.forEachReplica(replicaInfo -> coresCount[0]++);
+    }
+    return coresCount[0];
+  }
+
   @Override
-  public String postValidate(Clause.Condition condition) {
-    Clause.Condition nodeTag = condition.getClause().getTag();
-    if (nodeTag.name.equals("node") && nodeTag.op == Operand.WILDCARD) {
-      return null;
-    } else {
-      throw new IllegalArgumentException("cores: '#EQUAL' can be used only with node: '#ANY'");
+  public String postValidate(Condition condition) {
+    Condition nodeTag = condition.getClause().getTag();
+    if (nodeTag.varType != Type.NODE) return "'cores' attribute can only be used with 'node' attribute";
+    if (condition.computedType == ComputedType.EQUAL) {
+      if (nodeTag.name.equals("node") && (nodeTag.op == Operand.WILDCARD || nodeTag.op == Operand.IN)) {
+        return null;
+      } else {
+        return "cores: '#EQUAL' can be used only with node: '#ANY', node: [....]";
+      }
     }
+    return null;
   }
 
   @Override
-  public Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
+  public Operand getOperand(Operand expected, Object strVal, ComputedType computedType) {
     return ReplicaVariable.checkForRangeOperand(expected, strVal, computedType);
   }
 }
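
Worked numbers for the `#EQUAL` branch above, under an assumed cluster state:

    // Assume 10 cores in total across 4 live nodes.
    // {cores: '#EQUAL', node: '#ANY'}         -> 10 / 4 = 2.5 expected cores per node
    // {cores: '#EQUAL', node: [node1, node2]} -> tag.op == Operand.IN, so 2 buckets
    //                                            and 10 / 2 = 5.0 cores per listed node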

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
index b598207..600a708 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/FreeDiskVariable.java
@@ -46,11 +46,11 @@ public class FreeDiskVariable extends VariableBase {
   }
 
   @Override
-  public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
-    if (condition.computedType == Clause.ComputedType.PERCENT) {
+  public Object computeValue(Policy.Session session, Condition condition, String collection, String shard, String node) {
+    if (condition.computedType == ComputedType.PERCENT) {
       Row r = session.getNode(node);
       if (r == null) return 0d;
-      return Clause.ComputedType.PERCENT.compute(r.getVal(TOTALDISK.tagName), condition);
+      return ComputedType.PERCENT.compute(r.getVal(TOTALDISK.tagName), condition);
     }
     throw new IllegalArgumentException("Unsupported type " + condition.computedType);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
index 3e2368c..d4835b8 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java
@@ -49,7 +49,7 @@ public enum Operand {
     }
 
     @Override
-    public Object readRuleValue(Clause.Condition condition) {
+    public Object readRuleValue(Condition condition) {
       if (condition.val instanceof String) {
         String strVal = ((String) condition.val).trim();
         int hyphenIdx = strVal.indexOf('-');
@@ -91,7 +91,7 @@ public enum Operand {
     }
 
     @Override
-    public Object readRuleValue(Clause.Condition condition) {
+    public Object readRuleValue(Condition condition) {
       return RANGE_EQUAL.readRuleValue(condition);
     }
 
@@ -202,7 +202,7 @@ public enum Operand {
     return operand + val.toString();
   }
 
-  public Object readRuleValue(Clause.Condition condition) {
+  public Object readRuleValue(Condition condition) {
     return condition.val;
   }
 }
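
The `readRuleValue` overrides above are what turn a hyphenated string into a range; the TestPolicy changes in this commit assert the resulting shape:

    Clause c = Clause.create("{replica: '3-5', node:'#ANY'}");
    RangeVal range = (RangeVal) c.getReplica().getValue();  // min == 3.0, max == 5.0
    Operand op = c.getReplica().getOperand();               // Operand.RANGE_EQUAL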

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
index 09aa244..b398caf 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
@@ -514,8 +514,8 @@ public class Policy implements MapWriter {
           Map<String, String> withCollMap = (Map<String, String>) vals.get("withCollection");
           if (!withCollMap.isEmpty()) {
             Clause withCollClause = new Clause((Map<String,Object>)Utils.fromJSONString("{withCollection:'*' , node: '#ANY'}") ,
-                new Clause.Condition(NODE.tagName, "#ANY", Operand.EQUAL, null, null),
-                new Clause.Condition(WITH_COLLECTION.tagName,"*" , Operand.EQUAL, null, null), true
+                new Condition(NODE.tagName, "#ANY", Operand.EQUAL, null, null),
+                new Condition(WITH_COLLECTION.tagName,"*" , Operand.EQUAL, null, null), true
             );
             expandedClauses.add(withCollClause);
           }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
index ab0a03a..6239ec3 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java
@@ -30,7 +30,7 @@ class ReplicaVariable extends VariableBase {
     super(type);
   }
 
-  static int getRelevantReplicasCount(Policy.Session session, Clause.Condition cv, String collection, String shard) {
+  static int getRelevantReplicasCount(Policy.Session session, Condition cv, String collection, String shard) {
     AtomicInteger totalReplicasOfInterest = new AtomicInteger(0);
     Clause clause = cv.getClause();
     for (Row row : session.matrix) {
@@ -50,12 +50,12 @@ class ReplicaVariable extends VariableBase {
 
 
   @Override
-  public Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
-    if (computedType == Clause.ComputedType.ALL) return expected;
+  public Operand getOperand(Operand expected, Object strVal, ComputedType computedType) {
+    if (computedType == ComputedType.ALL) return expected;
     return checkForRangeOperand(expected, strVal, computedType);
   }
 
-  static Operand checkForRangeOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
+  static Operand checkForRangeOperand(Operand expected, Object strVal, ComputedType computedType) {
     if (strVal instanceof String) {
       String s = ((String) strVal).trim();
       int hyphenIdx = s.indexOf('-');
@@ -79,8 +79,8 @@ class ReplicaVariable extends VariableBase {
   }
 
   @Override
-  public String postValidate(Clause.Condition condition) {
-    if (condition.computedType == Clause.ComputedType.EQUAL) {
+  public String postValidate(Condition condition) {
+    if (condition.computedType == ComputedType.EQUAL) {
       if (condition.getClause().tag != null &&
 //              condition.getClause().tag.varType == NODE &&
           (condition.getClause().tag.op == Operand.WILDCARD || condition.getClause().tag.op == Operand.IN)) {
@@ -89,7 +89,7 @@ class ReplicaVariable extends VariableBase {
         return "'replica': '#EQUAL` must be used with 'node':'#ANY'";
       }
     }
-    if (condition.computedType == Clause.ComputedType.ALL) {
+    if (condition.computedType == ComputedType.ALL) {
       if (condition.getClause().tag != null && (condition.getClause().getTag().op == Operand.IN ||
           condition.getClause().getTag().op == Operand.WILDCARD)) {
         return StrUtils.formatString("array value or wild card cannot be used for tag {0} with replica : '#ALL'",
@@ -100,16 +100,16 @@ class ReplicaVariable extends VariableBase {
   }
 
   @Override
-  public Object computeValue(Policy.Session session, Clause.Condition cv, String collection, String shard, String node) {
-    if (cv.computedType == Clause.ComputedType.ALL)
+  public Object computeValue(Policy.Session session, Condition cv, String collection, String shard, String node) {
+    if (cv.computedType == ComputedType.ALL)
       return Double.valueOf(getRelevantReplicasCount(session, cv, collection, shard));
-    if (cv.computedType == Clause.ComputedType.EQUAL) {
+    if (cv.computedType == ComputedType.EQUAL) {
       int relevantReplicasCount = getRelevantReplicasCount(session, cv, collection, shard);
       double bucketsCount = getNumBuckets(session, cv.getClause());
       if (relevantReplicasCount == 0 || bucketsCount == 0) return 0;
       return (double) relevantReplicasCount / bucketsCount;
-    } else if (cv.computedType == Clause.ComputedType.PERCENT) {
-      return Clause.ComputedType.PERCENT.compute(getRelevantReplicasCount(session, cv, collection, shard), cv);
+    } else if (cv.computedType == ComputedType.PERCENT) {
+      return ComputedType.PERCENT.compute(getRelevantReplicasCount(session, cv, collection, shard), cv);
     } else {
       throw new IllegalArgumentException("Unsupported type " + cv.computedType);
 

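Worked numbers for the three branches above (values assumed for illustration):

    // With 6 relevant replicas and 3 buckets:
    //   '#ALL'   -> 6.0, the raw relevant-replica count
    //   '#EQUAL' -> 6 / 3 = 2.0 expected replicas per bucket
    //   '33%'    -> ComputedType.PERCENT.compute(6, cv) = 6 * 33 / 100 = 1.98
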
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
index c3d8ca2..d817c2f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java
@@ -42,6 +42,7 @@ public interface Variable {
   default boolean match(Object inputVal, Operand op, Object val, String name, Row row) {
     return op.match(val, validate(name, inputVal, false)) == Clause.TestStatus.PASS;
   }
+
   default Object convertVal(Object val) {
     return val;
   }
@@ -56,9 +57,9 @@ public interface Variable {
     }
   }
 
-  void getSuggestions(Suggestion.Ctx ctx) ;
+  void getSuggestions(Suggestion.Ctx ctx);
 
-  default Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
+  default Object computeValue(Policy.Session session, Condition condition, String collection, String shard, String node) {
     return condition.val;
   }
 
@@ -67,11 +68,11 @@ public interface Variable {
   default void projectRemoveReplica(Cell cell, ReplicaInfo ri, Consumer<Row.OperationInfo> opCollector) {
   }
 
-  default String postValidate(Clause.Condition condition) {
+  default String postValidate(Condition condition) {
     return null;
   }
 
-  default Operand getOperand(Operand expected, Object strVal, Clause.ComputedType computedType) {
+  default Operand getOperand(Operand expected, Object strVal, ComputedType computedType) {
     return expected;
   }
 
@@ -97,7 +98,7 @@ public interface Variable {
         type = Double.class,
         min = 0, max = -1,
         implementation = ReplicaVariable.class,
-        computedValues = {Clause.ComputedType.EQUAL, Clause.ComputedType.PERCENT, Clause.ComputedType.ALL})
+        computedValues = {ComputedType.EQUAL, ComputedType.PERCENT, ComputedType.ALL})
     REPLICA,
     @Meta(name = ImplicitSnitch.PORT,
         type = Long.class,
@@ -142,7 +143,7 @@ public interface Variable {
         associatedPerReplicaValue = Variable.coreidxsize,
         associatedPerNodeValue = "totaldisk",
         implementation = FreeDiskVariable.class,
-        computedValues = Clause.ComputedType.PERCENT)
+        computedValues = ComputedType.PERCENT)
     FREEDISK,
 
     @Meta(name = "totaldisk",
@@ -166,7 +167,7 @@ public interface Variable {
     @Meta(name = ImplicitSnitch.CORES,
         type = Double.class,
         min = 0, max = -1,
-        computedValues = Clause.ComputedType.EQUAL,
+        computedValues = {ComputedType.EQUAL, ComputedType.PERCENT},
         implementation = CoresVariable.class)
     CORES,
 
@@ -225,7 +226,7 @@ public interface Variable {
     public final String perReplicaValue;
     public final Set<String> associatedPerNodeValues;
     public final String metricsAttribute;
-    public final Set<Clause.ComputedType> supportedComputedTypes;
+    public final Set<ComputedType> supportedComputedTypes;
     final Variable impl;
 
 
@@ -238,7 +239,7 @@ public interface Variable {
       } catch (NoSuchFieldException e) {
         //cannot happen
       }
-      impl= VariableBase.loadImpl(meta, this);
+      impl = VariableBase.loadImpl(meta, this);
 
       this.tagName = meta.name();
       this.type = meta.type();
@@ -250,7 +251,7 @@ public interface Variable {
       this.associatedPerNodeValues = readSet(meta.associatedPerNodeValue());
       this.additive = meta.isAdditive();
       this.metricsAttribute = readStr(meta.metricsKey());
-      this.supportedComputedTypes = meta.computedValues()[0] == Clause.ComputedType.NULL ?
+      this.supportedComputedTypes = meta.computedValues()[0] == ComputedType.NULL ?
           emptySet() :
           unmodifiableSet(new HashSet(Arrays.asList(meta.computedValues())));
       this.wildCards = readSet(meta.wildCards());
@@ -282,10 +283,10 @@ public interface Variable {
 
     @Override
     public void addViolatingReplicas(Violation.Ctx ctx) {
-        impl.addViolatingReplicas(ctx);
+      impl.addViolatingReplicas(ctx);
     }
 
-    public Operand getOperand(Operand expected, Object val, Clause.ComputedType computedType) {
+    public Operand getOperand(Operand expected, Object val, ComputedType computedType) {
       return impl.getOperand(expected, val, computedType);
     }
 
@@ -294,7 +295,7 @@ public interface Variable {
       return impl.convertVal(val);
     }
 
-    public String postValidate(Clause.Condition condition) {
+    public String postValidate(Condition condition) {
       return impl.postValidate(condition);
     }
 
@@ -319,7 +320,7 @@ public interface Variable {
     }
 
     @Override
-    public Object computeValue(Policy.Session session, Clause.Condition condition, String collection, String shard, String node) {
+    public Object computeValue(Policy.Session session, Condition condition, String collection, String shard, String node) {
       return impl.computeValue(session, condition, collection, shard, node);
     }
 
@@ -360,6 +361,6 @@ public interface Variable {
 
     Class implementation() default void.class;
 
-    Clause.ComputedType[] computedValues() default Clause.ComputedType.NULL;
+    ComputedType[] computedValues() default ComputedType.NULL;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
index ad2b43b..8b3f1e1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java
@@ -40,8 +40,8 @@ public class VariableBase implements Variable {
   }
 
   static Object getOperandAdjustedValue(Object val, Object original) {
-    if (original instanceof Clause.Condition) {
-      Clause.Condition condition = (Clause.Condition) original;
+    if (original instanceof Condition) {
+      Condition condition = (Condition) original;
       if (condition.computedType == null && isIntegerEquivalent(val)) {
         if (condition.op == Operand.LESS_THAN) {
           //replica : '<3'
@@ -102,8 +102,8 @@ public class VariableBase implements Variable {
 
   @Override
   public Object validate(String name, Object val, boolean isRuleVal) {
-    if (val instanceof Clause.Condition) {
-      Clause.Condition condition = (Clause.Condition) val;
+    if (val instanceof Condition) {
+      Condition condition = (Condition) val;
       val = condition.op.readRuleValue(condition);
       if (val != condition.val) return val;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
index 2f81291..39b3c0b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Violation.java
@@ -170,7 +170,7 @@ public class Violation implements MapWriter {
   }
 
   static class Ctx {
-    final Function<Clause.Condition, Object> evaluator;
+    final Function<Condition, Object> evaluator;
     String tagKey;
     Clause clause;
     ReplicaCount count;
@@ -178,7 +178,7 @@ public class Violation implements MapWriter {
     List<Row> allRows;
     List<Violation> allViolations = new ArrayList<>();
 
-    public Ctx(Clause clause, List<Row> allRows, Function<Clause.Condition, Object> evaluator) {
+    public Ctx(Clause clause, List<Row> allRows, Function<Condition, Object> evaluator) {
       this.allRows = allRows;
       this.clause = clause;
       this.evaluator = evaluator;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/664187f7/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
index 48d8f2e..3bd58aa 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
@@ -567,7 +567,7 @@ public class TestPolicy extends SolrTestCaseJ4 {
     for (Suggester.SuggestionInfo suggestionInfo : l) {
       Map s = suggestionInfo.toMap(new LinkedHashMap<>());
       assertEquals("POST", Utils.getObjectByPath(s, true, "operation/method"));
-      if (Utils.getObjectByPath(s, false, "operation/command/add-replica") != null)  {
+      if (Utils.getObjectByPath(s, false, "operation/command/add-replica") != null) {
         numAdds++;
         assertEquals(1.0d, Utils.getObjectByPath(s, true, "violation/violation/delta"));
         assertEquals("/c/articles_coll/shards", Utils.getObjectByPath(s, true, "operation/path"));
@@ -577,7 +577,7 @@ public class TestPolicy extends SolrTestCaseJ4 {
         assertEquals("/c/articles_coll", Utils.getObjectByPath(s, true, "operation/path"));
         targetNodes.add((String) Utils.getObjectByPath(s, true, "operation/command/move-replica/targetNode"));
         movedReplicas.add((String) Utils.getObjectByPath(s, true, "operation/command/move-replica/replica"));
-      } else  {
+      } else {
         fail("Unexpected operation type suggested for suggestion: " + suggestionInfo);
       }
     }
@@ -712,7 +712,7 @@ public class TestPolicy extends SolrTestCaseJ4 {
     suggester.hint(Hint.SRC_NODE, "node1");
     SolrRequest op = suggester.getSuggestion();
     assertNotNull(op);
-    assertEquals("node2 should have been selected by move replica","node2",
+    assertEquals("node2 should have been selected by move replica", "node2",
         op.getParams().get("targetNode"));
 
     session = suggester.getSession();
@@ -801,13 +801,13 @@ public class TestPolicy extends SolrTestCaseJ4 {
       }
       throw new RuntimeException("");
     });
-    assertTrue( clause.getReplica().isPass(2));
+    assertTrue(clause.getReplica().isPass(2));
 
     clause = Clause.create("{replica: '3 - 5', node:'#ANY'}");
-    assertEquals(Operand.RANGE_EQUAL,  clause.getReplica().getOperand());
+    assertEquals(Operand.RANGE_EQUAL, clause.getReplica().getOperand());
     RangeVal range = (RangeVal) clause.getReplica().getValue();
-    assertEquals(3.0 , range.min);
-    assertEquals(5.0 , range.max);
+    assertEquals(3.0, range.min);
+    assertEquals(5.0, range.max);
     assertTrue(clause.replica.isPass(3));
     assertTrue(clause.replica.isPass(4));
     assertTrue(clause.replica.isPass(5));
@@ -832,7 +832,7 @@ public class TestPolicy extends SolrTestCaseJ4 {
     expectThrows(IllegalArgumentException.class,
         () -> Clause.create("{replica: '#EQUAL', node:'node_1'}"));
     clause = Clause.create("{replica : 0, freedisk:'<20%'}");
-    assertEquals(clause.tag.computedType, Clause.ComputedType.PERCENT);
+    assertEquals(clause.tag.computedType, ComputedType.PERCENT);
     assertEquals(clause.tag.op, Operand.LESS_THAN);
     expectThrows(IllegalArgumentException.class,
         () -> Clause.create("{replica : 0, INDEX.sizeInGB:'>300'}"));
@@ -864,16 +864,39 @@ public class TestPolicy extends SolrTestCaseJ4 {
     expectThrows(IllegalArgumentException.class,
         () -> Clause.create("{replica: '#ALL' , shard: '#EACH' , sysprop.zone:'#EACH'}"));
     clause = Clause.create("{replica: '#EQUAL' , shard: '#EACH' , sysprop.zone:[east, west]}");
-    assertEquals(Clause.ComputedType.EQUAL, clause.replica.computedType);
+    assertEquals(ComputedType.EQUAL, clause.replica.computedType);
     assertEquals(Operand.IN, clause.tag.op);
     expectThrows(IllegalArgumentException.class,
         () -> Clause.create("{replica: '#EQUAL' , shard: '#EACH' , sysprop.zone:[east]}"));
 
     clause = Clause.create("{cores: '#EQUAL' , node:'#ANY'}");
-    assertEquals(Clause.ComputedType.EQUAL, clause.globalTag.computedType);
+    assertEquals(ComputedType.EQUAL, clause.globalTag.computedType);
     expectThrows(IllegalArgumentException.class,
         () -> Clause.create("{cores: '#EQUAL' , node:'node1'}"));
 
+    clause = Clause.create("{cores: '#EQUAL' , node:[node1 , node2 , node3]}");
+    assertEquals(Operand.IN, clause.getTag().op);
+    assertEquals(ComputedType.EQUAL, clause.getGlobalTag().computedType);
+
+    clause = Clause.create("{cores: '3-5' , node:'#ANY'}");
+    assertEquals(Operand.RANGE_EQUAL, clause.globalTag.op);
+    assertEquals(3.0d, ((RangeVal) clause.globalTag.val).min.doubleValue(), 0.001);
+    assertEquals(5.0d, ((RangeVal) clause.globalTag.val).max.doubleValue(), 0.001);
+
+    clause = Clause.create("{cores: 1.66 , node:'#ANY'}");
+    assertEquals(Operand.RANGE_EQUAL, clause.globalTag.op);
+    assertEquals(1.0d, ((RangeVal) clause.globalTag.val).min.doubleValue(), 0.001);
+    assertEquals(2.0d, ((RangeVal) clause.globalTag.val).max.doubleValue(), 0.001);
+    assertEquals(1.66d, ((RangeVal) clause.globalTag.val).actual.doubleValue(), 0.001);
+
+    expectThrows(IllegalArgumentException.class,
+        () -> Clause.create("{cores:5, sysprop.zone : west}"));
+
+    clause = Clause.create("{cores: '14%' , node:'#ANY'}");
+    assertEquals(ComputedType.PERCENT, clause.getGlobalTag().computedType);
+
+    clause = Clause.create("{cores: '14%' , node:[node1, node2, node3]}");
+    assertEquals(Operand.IN, clause.getTag().op);
   }
 
 
@@ -1018,7 +1041,7 @@ public class TestPolicy extends SolrTestCaseJ4 {
       } else if (violation.node.equals("node5")) {
         assertEquals(-1, violation.replicaCountDelta.doubleValue(), 0.01);
 
-      } else{
+      } else {
         fail();
       }
     }
@@ -1026,7 +1049,6 @@ public class TestPolicy extends SolrTestCaseJ4 {
 //    assertEquals("node1", violation.node);
 
 
-
   }
 
   private static void expectError(String name, Object val, String msg) {
@@ -2358,10 +2380,10 @@ public class TestPolicy extends SolrTestCaseJ4 {
     List<Violation> violations = session.getViolations();
     assertEquals(2, violations.size());
     for (Violation violation : violations) {
-      if(violation.node.equals("10.0.0.6:8983_solr")){
+      if (violation.node.equals("10.0.0.6:8983_solr")) {
         assertEquals(1.0d, violation.replicaCountDelta, 0.01);
         assertEquals(1.53d, ((RangeVal) violation.getClause().getReplica().val).actual);
-      } else if(violation.node.equals("10.0.0.6:7574_solr")){
+      } else if (violation.node.equals("10.0.0.6:7574_solr")) {
         assertEquals(-1.0d, violation.replicaCountDelta, 0.01);
       }
 
@@ -2486,7 +2508,6 @@ public class TestPolicy extends SolrTestCaseJ4 {
   }
 
 
-
   public void testFreeDiskSuggestions() {
     String dataproviderdata = "{" +
         "  liveNodes:[node1,node2]," +
@@ -3082,9 +3103,10 @@ public class TestPolicy extends SolrTestCaseJ4 {
       suggester = createSuggester(cloudManager, jsonObj, suggester);
     }
 
-    assertEquals("count = "+count ,0,count);
+    assertEquals("count = " + count, 0, count);
   }
-public void testUtilizeNodeFailure2() throws Exception {
+
+  public void testUtilizeNodeFailure2() throws Exception {
     String state = "{  'liveNodes':[" +
         "  '127.0.0.1:51075_solr'," +
         "  '127.0.0.1:51076_solr'," +
@@ -3166,7 +3188,7 @@ public void testUtilizeNodeFailure2() throws Exception {
       suggester = createSuggester(cloudManager, jsonObj, suggester);
     }
 
-    assertEquals("count = "+count ,1,count);
+    assertEquals("count = " + count, 1, count);
   }
 
   //SOLR-12358
@@ -3476,6 +3498,7 @@ public void testUtilizeNodeFailure2() throws Exception {
 
 
   }
+
   public void testViolationOutput() throws IOException {
     String autoScalingjson = "{" +
         "  'cluster-preferences': [" +
@@ -3534,7 +3557,7 @@ public void testUtilizeNodeFailure2() throws Exception {
     new SolrJSONWriter(writer)
         .writeObj(val)
         .close();
-    JSONWriter.write (writer, true, JsonTextWriter.JSON_NL_MAP, val);
+    JSONWriter.write(writer, true, JsonTextWriter.JSON_NL_MAP, val);
 
     Object root = Utils.fromJSONString(writer.toString());
     assertEquals(2l,
@@ -3547,7 +3570,7 @@ public void testUtilizeNodeFailure2() throws Exception {
   }
 
 
-  public void testFreediskPercentage(){
+  public void testFreediskPercentage() {
     String dataproviderdata = "{" +
         "  'liveNodes': [" +
         "    'node1:8983'," +


[16/48] lucene-solr:jira/http2: LUCENE-8060: IndexSearcher's search and searchAfter methods now only compute total hit counts accurately up to 1,000.

Posted by da...@apache.org.
LUCENE-8060: IndexSearcher's search and searchAfter methods now only compute total hit counts accurately up to 1,000.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/99dbe936
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/99dbe936
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/99dbe936

Branch: refs/heads/jira/http2
Commit: 99dbe936818add5723f2014a90bd0ea8a17c8f19
Parents: 0dc124a
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Aug 1 09:00:40 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Aug 1 09:01:21 2018 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  4 ++++
 lucene/MIGRATE.txt                              |  8 ++++---
 .../org/apache/lucene/search/IndexSearcher.java | 23 +++++++++++++++++---
 .../org/apache/lucene/search/TestBoolean2.java  |  5 ++---
 .../apache/lucene/search/TestBooleanScorer.java |  2 +-
 .../apache/lucene/search/TestLRUQueryCache.java |  8 ++++++-
 .../apache/lucene/search/TestNeedsScores.java   |  2 +-
 .../lucene/search/TestShardSearching.java       |  2 +-
 .../apache/lucene/search/TestTopDocsMerge.java  |  2 +-
 .../search/join/ToParentBlockJoinQuery.java     |  8 +------
 .../java/org/apache/lucene/util/TestUtil.java   | 18 ++++++++++++---
 11 files changed, 58 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 0f13dd3..76815f5 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -96,6 +96,10 @@ Changes in Runtime Behavior
 * LUCENE-7444: StandardAnalyzer no longer defaults to removing English stopwords
   (Alan Woodward)
 
+* LUCENE-8060: IndexSearcher's search and searchAfter methods now only compute
+  total hit counts accurately up to 1,000 in order to enable top-hits
+  optimizations such as block-max WAND (LUCENE-8135). (Adrien Grand)
+
 Improvements
 
 * LUCENE-7997: Add BaseSimilarityTestCase to sanity check similarities.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/MIGRATE.txt
----------------------------------------------------------------------
diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt
index fc0930c..7dfe5c7 100644
--- a/lucene/MIGRATE.txt
+++ b/lucene/MIGRATE.txt
@@ -90,10 +90,12 @@ request in order to only compute scores for documents that made it to the top
 hits. As a consequence, the trackDocScores option has been removed and can be
 replaced with the new TopFieldCollector#populateScores helper method.
 
-## TopDocs.totalHits is no longer a long ##
+## IndexSearcher.search(After) may return lower bounds of the hit count and TopDocs.totalHits is no longer a long ##
 
 Lucene 8 received optimizations for collection of top-k matches by not visiting
 all matches. However these optimizations won't help if all matches still need
 to be visited in order to compute the total number of hits. As a consequence,
-TopDocs.totalHits is now an TotalHits object that is either an exact hit count
-or a lower bound of the hit count.
+IndexSearcher's search and searchAfter methods were changed to only count hits
+accurately up to 1,000, and TopDocs.totalHits was changed from a long to an
+object that says whether the hit count is accurate or a lower bound of the
+actual hit count.
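
A sketch of reading the new `TopDocs.totalHits` shape described above:

    TopDocs td = searcher.search(query, 10);
    if (td.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
      long exact = td.totalHits.value;    // accurate count
    } else {                              // GREATER_THAN_OR_EQUAL_TO
      long atLeast = td.totalHits.value;  // lower bound of the real hit count
    }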

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
index d2e5d01..689409f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
+++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
@@ -63,7 +63,19 @@ import org.apache.lucene.util.ThreadInterruptedException;
  * reader ({@link DirectoryReader#open(IndexWriter)}).
  * Once you have a new {@link IndexReader}, it's relatively
  * cheap to create a new IndexSearcher from it.
- * 
+ *
+ * <p><b>NOTE</b>: The {@link #search} and {@link #searchAfter} methods are
+ * configured to only count top hits accurately up to {@code 1,000} and may
+ * return a {@link TotalHits.Relation lower bound} of the hit count if the
+ * hit count is greater than or equal to {@code 1,000}. On queries that match
+ * lots of documents, counting the number of hits may take much longer than
+ * computing the top hits, so this trade-off allows getting some minimal
+ * information about the hit count without slowing down search too much. The
+ * {@link TopDocs#scoreDocs} array is always accurate however. If this behavior
+ * doesn't suit your needs, you should create collectors manually with either
+ * {@link TopScoreDocCollector#create} or {@link TopFieldCollector#create} and
+ * call {@link #search(Query, Collector)}.
+ *
  * <a name="thread-safety"></a><p><b>NOTE</b>: <code>{@link
  * IndexSearcher}</code> instances are completely
  * thread safe, meaning multiple threads can call any of its
@@ -82,6 +94,11 @@ public class IndexSearcher {
     final long maxRamBytesUsed = Math.min(1L << 25, Runtime.getRuntime().maxMemory() / 20);
     DEFAULT_QUERY_CACHE = new LRUQueryCache(maxCachedQueries, maxRamBytesUsed);
   }
+  /**
+   * By default we count hits accurately up to 1000. This makes sure that we
+   * don't spend most time on computing hit counts
+   */
+  private static final int TOTAL_HITS_THRESHOLD = 1000;
 
   final IndexReader reader; // package private for testing!
   
@@ -384,7 +401,7 @@ public class IndexSearcher {
 
       @Override
       public TopScoreDocCollector newCollector() throws IOException {
-        return TopScoreDocCollector.create(cappedNumHits, after, Integer.MAX_VALUE);
+        return TopScoreDocCollector.create(cappedNumHits, after, TOTAL_HITS_THRESHOLD);
       }
 
       @Override
@@ -513,7 +530,7 @@ public class IndexSearcher {
       @Override
       public TopFieldCollector newCollector() throws IOException {
         // TODO: don't pay the price for accurate hit counts by default
-        return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, Integer.MAX_VALUE);
+        return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, TOTAL_HITS_THRESHOLD);
       }
 
       @Override

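When exact totals are needed, the javadoc above suggests creating collectors manually, and the test changes below switch to IndexSearcher.count; a sketch of both routes:

    // Cheapest exact total: count without collecting top hits.
    int exact = searcher.count(query);
    // Or keep the top hits and raise the threshold so counting stays accurate:
    TopScoreDocCollector collector = TopScoreDocCollector.create(10, null, Integer.MAX_VALUE);
    searcher.search(query, collector);
    TopDocs td = collector.topDocs();     // td.totalHits is now exact
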
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
index e15ba97..9478841 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
@@ -255,7 +255,7 @@ public class TestBoolean2 extends LuceneTestCase {
     
     // sanity check expected num matches in bigSearcher
     assertEquals(mulFactor * collector.totalHits,
-                 bigSearcher.search(query, 1).totalHits.value);
+                 bigSearcher.count(query));
 
     // now check 2 diff scorers from the bigSearcher as well
     collector = TopScoreDocCollector.create(topDocsToCheck, Integer.MAX_VALUE);
@@ -398,8 +398,7 @@ public class TestBoolean2 extends LuceneTestCase {
         BooleanQuery.Builder q3 = new BooleanQuery.Builder();
         q3.add(q1, BooleanClause.Occur.SHOULD);
         q3.add(new PrefixQuery(new Term("field2", "b")), BooleanClause.Occur.SHOULD);
-        TopDocs hits4 = bigSearcher.search(q3.build(), 1);
-        assertEquals(mulFactor*collector.totalHits + NUM_EXTRA_DOCS/2, hits4.totalHits.value);
+        assertEquals(mulFactor*collector.totalHits + NUM_EXTRA_DOCS/2, bigSearcher.count(q3.build()));
 
         // test diff (randomized) scorers produce the same results on bigSearcher as well
         collector = TopFieldCollector.create(sort, 1000 * mulFactor, 1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
index 4d3d8d8..86733a4 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
@@ -149,7 +149,7 @@ public class TestBooleanScorer extends LuceneTestCase {
     q2.add(q1.build(), BooleanClause.Occur.SHOULD);
     q2.add(new CrazyMustUseBulkScorerQuery(), BooleanClause.Occur.SHOULD);
 
-    assertEquals(1, s.search(q2.build(), 10).totalHits.value);
+    assertEquals(1, s.count(q2.build()));
     r.close();
     dir.close();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index a30e026..5633607 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -148,7 +148,13 @@ public class TestLRUQueryCache extends LuceneTestCase {
                 TotalHitCountCollector collector = new TotalHitCountCollector();
                 searcher.search(q, collector); // will use the cache
                 final int totalHits1 = collector.getTotalHits();
-                final long totalHits2 = searcher.search(q, 1).totalHits.value; // will not use the cache because of scores
+                TotalHitCountCollector collector2 = new TotalHitCountCollector();
+                searcher.search(q, new FilterCollector(collector2) {
+                  public ScoreMode scoreMode() {
+                    return ScoreMode.COMPLETE; // will not use the cache because of scores
+                  }
+                });
+                final long totalHits2 = collector2.getTotalHits();
                 assertEquals(totalHits2, totalHits1);
               } finally {
                 mgr.release(searcher);

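Background on this hunk: the LRU query cache only serves collectors that do
not need scores, so the old search(q, 1) call was a convenient way to force a
cache miss; with hit counting now capped, the test instead wraps a
TotalHitCountCollector in a FilterCollector that advertises
ScoreMode.COMPLETE. The same idea as a standalone sketch (Lucene 8.x assumed,
`searcher` and `query` are placeholders):

    TotalHitCountCollector counter = new TotalHitCountCollector();
    searcher.search(query, new FilterCollector(counter) {
      @Override
      public ScoreMode scoreMode() {
        // Claiming scores are needed keeps the query cache out of the
        // picture while still producing an exact hit count.
        return ScoreMode.COMPLETE;
      }
    });
    int uncachedCount = counter.getTotalHits();
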
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
index 88860c0..75b6da1 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
@@ -62,7 +62,7 @@ public class TestNeedsScores extends LuceneTestCase {
     Query required = new TermQuery(new Term("field", "this"));
     Query prohibited = new TermQuery(new Term("field", "3"));
     BooleanQuery.Builder bq = new BooleanQuery.Builder();
-    bq.add(new AssertNeedsScores(required, ScoreMode.COMPLETE), BooleanClause.Occur.MUST);
+    bq.add(new AssertNeedsScores(required, ScoreMode.TOP_SCORES), BooleanClause.Occur.MUST);
     bq.add(new AssertNeedsScores(prohibited, ScoreMode.COMPLETE_NO_SCORES), BooleanClause.Occur.MUST_NOT);
     assertEquals(4, searcher.search(bq.build(), 5).totalHits.value); // we exclude 3
   }

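The expected mode for the MUST clause flips from COMPLETE to TOP_SCORES
because a top-docs collector with a finite hit-count threshold now tells the
searcher that only competitive scores matter, allowing scorers to skip
non-competitive documents. A hedged sketch of where the two modes come from
(this reflects the expected collector behavior, not code from this commit):

    // Finite threshold: the collector permits skipping non-competitive docs.
    TopScoreDocCollector pruning = TopScoreDocCollector.create(10, 1000);
    assert pruning.scoreMode() == ScoreMode.TOP_SCORES;

    // Exact counting requested: every match must be scored and counted.
    TopScoreDocCollector exact = TopScoreDocCollector.create(10, Integer.MAX_VALUE);
    assert exact.scoreMode() == ScoreMode.COMPLETE;
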
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java b/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java
index 043c943..5b9a62b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java
@@ -384,7 +384,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
       sd.doc += base[sd.shardIndex];
     }
 
-    TestUtil.assertEquals(hits, shardHits);
+    TestUtil.assertConsistent(hits, shardHits);
 
     if (moreHits) {
       // Return a continuation:

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
index bf92642..43db2f2 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
@@ -372,7 +372,7 @@ public class TestTopDocsMerge extends LuceneTestCase {
         }
       }
 
-      TestUtil.assertEquals(topHits, mergedHits);
+      TestUtil.assertConsistent(topHits, mergedHits);
     }
     reader.close();
     dir.close();

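The rename from TestUtil.assertEquals to assertConsistent (defined later in
this commit) reflects that two TopDocs can no longer be expected to carry
identical hit counts: each side may report an exact count or a lower bound,
and only the comparable combinations are asserted. A worked illustration of
the rule (the constructor calls are a sketch, not code from the commit):

    // Exact 42 vs "at least 40": consistent, since 42 >= 40.
    TotalHits exact = new TotalHits(42, TotalHits.Relation.EQUAL_TO);
    TotalHits bound = new TotalHits(40, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);

    // Exact 42 vs "at least 50" would be inconsistent (42 < 50), while two
    // lower bounds are never compared against each other.
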
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
index 3b99ccf..04e8959 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
@@ -311,13 +311,7 @@ public class ToParentBlockJoinQuery extends Query {
 
     @Override
     public float getMaxScore(int upTo) throws IOException {
-      switch(scoreMode) {
-        case Max:
-        case Min:
-          return childScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS);
-        default:
-          return Float.POSITIVE_INFINITY;
-      }
+      return Float.POSITIVE_INFINITY;
     }
 
     private void setScoreAndFreq() throws IOException {

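On the getMaxScore simplification: the Scorer contract only requires an upper
bound on the scores this scorer may produce up to the given doc ID, so
Float.POSITIVE_INFINITY is always a correct answer; the trade-off is that
callers can never skip ahead based on it. Roughly how a caller exploits a
tighter bound (hypothetical names, sketch only):

    // If even the best possible score in this range cannot beat the current
    // k-th best score, the whole range is safe to skip.
    float maxScore = scorer.getMaxScore(upTo);
    if (maxScore < minCompetitiveScore) {
      // skip all documents up to and including upTo
    }
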
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99dbe936/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index bc31b44..b12d7b8 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -92,6 +92,7 @@ import org.apache.lucene.mockfile.WindowsFS;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.FilterDirectory;
@@ -1040,9 +1041,20 @@ public final class TestUtil {
     Assert.assertEquals("Reflection does not produce same map", reflectedValues, map);
   }
 
-  public static void assertEquals(TopDocs expected, TopDocs actual) {
-    Assert.assertEquals("wrong total hits", expected.totalHits.value, actual.totalHits.value);
-    Assert.assertEquals("wrong total hits", expected.totalHits.relation, actual.totalHits.relation);
+  /**
+   * Assert that the given {@link TopDocs} have the same top docs and consistent hit counts.
+   */
+  public static void assertConsistent(TopDocs expected, TopDocs actual) {
+    Assert.assertEquals("wrong total hits", expected.totalHits.value == 0, actual.totalHits.value == 0);
+    if (expected.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
+      if (actual.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
+        Assert.assertEquals("wrong total hits", expected.totalHits.value, actual.totalHits.value);
+      } else {
+        Assert.assertTrue("wrong total hits", expected.totalHits.value >= actual.totalHits.value);
+      }
+    } else if (actual.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
+      Assert.assertTrue("wrong total hits", expected.totalHits.value <= actual.totalHits.value);
+    }
     Assert.assertEquals("wrong hit count", expected.scoreDocs.length, actual.scoreDocs.length);
     for(int hitIDX=0;hitIDX<expected.scoreDocs.length;hitIDX++) {
       final ScoreDoc expectedSD = expected.scoreDocs[hitIDX];